Commit b381a3a9 by zhangyannao

Code commit

parent 742d13ef
package com.gic.spark.entity;
import com.gic.spark.entity.request.AbstractFilterRequest;
/**
* Created by paste on 2021-03-11 13:51
*
* @author paste
*/
public class FilterProcessEntity {
String tagGroupId;
AbstractFilterRequest request;
public FilterProcessEntity(String tagGroupId, AbstractFilterRequest request) {
this.tagGroupId = tagGroupId;
this.request = request;
}
public String getTagGroupId() {
return tagGroupId;
}
public void setTagGroupId(String tagGroupId) {
this.tagGroupId = tagGroupId;
}
public AbstractFilterRequest getRequest() {
return request;
}
public void setRequest(AbstractFilterRequest request) {
this.request = request;
}
}
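For orientation, a minimal sketch of how a FilterProcessEntity might be built and handed to the new batch filterValidMember overload added further down; TagConsumeDoubleRequest is taken from this commit, but the no-arg construction shown here is an assumption for illustration only.

import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeDoubleRequest;
import java.util.Collections;
import java.util.List;

public class FilterProcessEntitySketch {
    public static void main(String[] args) {
        // Assumption: TagConsumeDoubleRequest can be created with a no-arg constructor.
        AbstractFilterRequest request = new TagConsumeDoubleRequest();
        // Pair the request with the tag group it belongs to.
        FilterProcessEntity entity = new FilterProcessEntity("tagGroup-001", request);
        List<FilterProcessEntity> processEntityList = Collections.singletonList(entity);
        System.out.println(processEntityList.get(0).getTagGroupId()); // tagGroup-001
    }
}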
...@@ -3,14 +3,13 @@ package com.gic.spark.entity;
import com.gic.spark.entity.table.TabSceneCrowd;
import java.util.LinkedList;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2020/4/16
*/
public class SceneCrowdDTO extends TabSceneCrowd {
private LinkedList<TagConditionGroupDTO> conditionGroupDTOList;
...@@ -18,7 +17,7 @@ public class SceneCrowdDTO extends TabSceneCrowd{
public SceneCrowdDTO() {
}
public SceneCrowdDTO(TabSceneCrowd sceneCrowd, LinkedList<TagConditionGroupDTO> conditionGroupDTOList) {
this.setId(sceneCrowd.getId());
this.setScene_Crowd_Name(sceneCrowd.getScene_Crowd_Name());
this.setReal_Time(sceneCrowd.getReal_Time());
......
...@@ -8,10 +8,9 @@ import java.sql.Timestamp;
* @author: wangxk
* @date: 2020/4/28
*/
public class TagCouponBean implements Serializable {
private Long coupon_id;
private Long ecu_Id;
private Long scu_Id;
private Long acu_Id;
private Long mcu_Id;
...@@ -26,14 +25,6 @@ public class TagCouponBean implements Serializable{
this.coupon_id = coupon_id;
}
public Long getEcu_Id() {
return ecu_Id;
}
public void setEcu_Id(Long ecu_Id) {
this.ecu_Id = ecu_Id;
}
public Long getScu_Id() {
return scu_Id;
}
......
...@@ -13,7 +13,6 @@ public class TagIntegralBean implements Serializable {
private Integer enterprise_Id;
private Integer cu_Type;
private Long cu_Id;
private Long file_ecu_id;
private Integer integral_Value;
private java.sql.Timestamp create_Time;
private java.sql.Timestamp limit_time;
...@@ -42,14 +41,6 @@ public class TagIntegralBean implements Serializable {
this.cu_Id = cu_Id;
}
public Long getFile_ecu_id() {
return file_ecu_id;
}
public void setFile_ecu_id(Long file_ecu_id) {
this.file_ecu_id = file_ecu_id;
}
public Integer getIntegral_Value() {
return integral_Value;
}
......
package com.gic.spark.entity.bean;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2020/5/7
*/
public class TrdEcuBrandBean extends TrdEcuSalesBeanBase implements Serializable {
private Long ent_brand_id; // product brand ID
public Long getEnt_brand_id() {
return ent_brand_id;
}
public void setEnt_brand_id(Long ent_brand_id) {
this.ent_brand_id = ent_brand_id;
}
}
package com.gic.spark.entity.bean;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2020/5/7
*/
public class TrdEcuBrandLabelBean implements Serializable {
private Integer ent_id; // enterprise ID
private Long mbr_area_id; // card area ID
private Long ecu_id; // user ID
private Integer store_info_id; // store ID
private Integer order_times; // consumption count (taken according to configuration)
private Integer seff_order_cnt; // configured effective order count
private Integer seff_goods_num; // configured effective sales item count
private Double receive_amt; // receivable amount
private Double pay_amt; // actually paid amount
private Double total_amt; // total tag-price amount
private Integer sleep_days; // dormancy days
private Long ent_brand_id; // product brand ID
public Integer getEnt_id() {
return ent_id;
}
public void setEnt_id(Integer ent_id) {
this.ent_id = ent_id;
}
public Long getMbr_area_id() {
return mbr_area_id;
}
public void setMbr_area_id(Long mbr_area_id) {
this.mbr_area_id = mbr_area_id;
}
public Long getEcu_id() {
return ecu_id;
}
public void setEcu_id(Long ecu_id) {
this.ecu_id = ecu_id;
}
public Integer getStore_info_id() {
return store_info_id;
}
public void setStore_info_id(Integer store_info_id) {
this.store_info_id = store_info_id;
}
public Integer getOrder_times() {
return order_times;
}
public void setOrder_times(Integer order_times) {
this.order_times = order_times;
}
public Integer getSeff_goods_num() {
return seff_goods_num;
}
public void setSeff_goods_num(Integer seff_goods_num) {
this.seff_goods_num = seff_goods_num;
}
public Double getPay_amt() {
return pay_amt;
}
public void setPay_amt(Double pay_amt) {
this.pay_amt = pay_amt;
}
public Double getTotal_amt() {
return total_amt;
}
public void setTotal_amt(Double total_amt) {
this.total_amt = total_amt;
}
public Integer getSleep_days() {
return sleep_days;
}
public void setSleep_days(Integer sleep_days) {
this.sleep_days = sleep_days;
}
public Long getEnt_brand_id() {
return ent_brand_id;
}
public void setEnt_brand_id(Long ent_brand_id) {
this.ent_brand_id = ent_brand_id;
}
public Double getReceive_amt() {
return receive_amt;
}
public void setReceive_amt(Double receive_amt) {
this.receive_amt = receive_amt;
}
public Integer getSeff_order_cnt() {
return seff_order_cnt;
}
public void setSeff_order_cnt(Integer seff_order_cnt) {
this.seff_order_cnt = seff_order_cnt;
}
}
package com.gic.spark.entity.bean;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2020/5/7
*/
public class TrdEcuChannelBean extends TrdEcuSalesBeanBase implements Serializable {
private Long channel_code; // channel
public Long getChannel_code() {
return channel_code;
}
public void setChannel_code(Long channel_code) {
this.channel_code = channel_code;
}
}
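Both TrdEcuBrandBean (above) and TrdEcuChannelBean extend TrdEcuSalesBeanBase, which is what lets filterSalesBean in AbstractTagConsumFilter below hand back either kind of row under the common base type. A minimal sketch of that usage, assuming the usual no-arg bean constructors:

import com.gic.spark.entity.bean.TrdEcuBrandBean;
import com.gic.spark.entity.bean.TrdEcuChannelBean;
import com.gic.spark.entity.bean.TrdEcuSalesBeanBase;
import java.util.Arrays;
import java.util.List;

public class SalesBeanBaseSketch {
    public static void main(String[] args) {
        TrdEcuBrandBean brandRow = new TrdEcuBrandBean();
        brandRow.setEnt_brand_id(42L);      // brand dimension
        TrdEcuChannelBean channelRow = new TrdEcuChannelBean();
        channelRow.setChannel_code(7L);     // channel dimension
        // Both rows can be collected under the shared base type.
        List<TrdEcuSalesBeanBase> rows = Arrays.asList(brandRow, channelRow);
        System.out.println(rows.size());    // 2
    }
}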
...@@ -7,7 +7,7 @@ import java.io.Serializable;
* @author: wangxk
* @date: 2020/5/7
*/
public class TrdEcuSalesBeanBase implements Serializable {
private Integer ent_id; // enterprise ID
private Long mbr_area_id; // card area ID
......
...@@ -3,7 +3,7 @@ package com.gic.spark.entity.table;
import java.io.Serializable;
public class TabDataActuallyPaidConfig implements Serializable {
private Integer actually_Paid_Config_Id;
private Integer enterprise_Id;
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdEcuBrandBean;
import com.gic.spark.entity.bean.TrdEcuChannelBean;
import com.gic.spark.entity.bean.TrdEcuSalesBeanBase;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeDoubleRequest;
import com.gic.spark.util.ConstantUtil;
import com.google.common.collect.Lists;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* @description:
...@@ -22,33 +26,73 @@ public abstract class AbstractTagConsumFilter implements BaseTagFilter {
DataSourceHive dataSourceHiveBrandLabel = new DataSourceHive(ConstantUtil.ADS_GIC_TRD_ECU_BRAND_LABEL_D);
DataSourceHive dataSourceHiveSalesLabel = new DataSourceHive(ConstantUtil.ADS_GIC_TRD_ECU_SALES_LABEL_D);
public static class ConsumeStatisticEntity {
Long ecuId;
List<TrdEcuBrandBean> brandBeanList;
List<TrdEcuChannelBean> channelBeanList;
}
protected static List<TrdEcuSalesBeanBase> filterSalesBean(ConsumeStatisticEntity entity, AbstractFilterRequest consumeRequest) {
List<TrdEcuSalesBeanBase> result = new ArrayList<>();
switch (consumeRequest.statisticsType) {
case MCUINFO:
for (TrdEcuChannelBean bean : entity.channelBeanList) {
if (consumeRequest.getStatisticsValList().contains(String.valueOf(bean.getMbr_area_id()))) {
result.add(bean);
}
}
break;
case CHANNEL:
for (TrdEcuChannelBean bean : entity.channelBeanList) {
if (consumeRequest.getStatisticsValList().contains(String.valueOf(bean.getChannel_code()))) {
result.add(bean);
}
}
break;
case COMMODITYBRAND:
for (TrdEcuBrandBean bean : entity.brandBeanList) {
if (consumeRequest.getStatisticsValList().contains(String.valueOf(bean.getEnt_brand_id()))) {
result.add(bean);
}
}
break;
default:
break;
}
return result;
}
protected static JavaRDD<TrdEcuSalesBeanBase> statisticsTypeHandle(JavaRDD<Tuple2<TrdEcuSalesBeanBase, Optional<Iterable<TrdEcuBrandBean>>>> labelRDD, AbstractFilterRequest consumeRequest) {
JavaRDD<TrdEcuSalesBeanBase> consumeRDD = labelRDD.mapPartitions(data -> {
List<TrdEcuSalesBeanBase> result = new ArrayList();
while (data.hasNext()) {
Tuple2<TrdEcuSalesBeanBase, Optional<Iterable<TrdEcuBrandBean>>> tp2 = data.next();
TrdEcuSalesBeanBase consumeBean = tp2._1();
switch (consumeRequest.getStatisticsType()) {
case COMMODITYBRAND:
if (tp2._2().isPresent()) {
for (TrdEcuBrandBean brandLabelBean : tp2._2().get()) {
if (consumeRequest.getStatisticsValList().contains(String.valueOf(brandLabelBean.getEnt_brand_id()))) {
result.add(brandLabelBean);
}
}
}
break;
// TODO: rework the channel statistics handling
case CHANNEL:
if (consumeRequest.getStatisticsValList().contains(String.valueOf(consumeBean.getStore_info_id()))) {
result.add(consumeBean);
}
break;
case MCUINFO:
if (consumeRequest.getStatisticsValList().contains(String.valueOf(consumeBean.getMbr_area_id()))) {
result.add(consumeBean);
}
break;
default:
break;
}
}
return result.iterator();
...@@ -56,8 +100,28 @@ public abstract class AbstractTagConsumFilter implements BaseTagFilter {
return consumeRDD;
}
protected JavaRDD<ConsumeStatisticEntity> getConsumeEntity(Integer enterpriseId) {
JavaRDD<TrdEcuChannelBean> channelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuChannelBean.class).javaRDD();
JavaRDD<TrdEcuBrandBean> brandRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandBean.class).javaRDD();
JavaRDD<ConsumeStatisticEntity> rdd = channelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.groupByKey()
.leftOuterJoin(brandRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data)).groupByKey())
.map(data -> {
Long ecuId = data._1();
Optional<Iterable<TrdEcuBrandBean>> optional = data._2()._2();
ConsumeStatisticEntity entity = new ConsumeStatisticEntity();
entity.ecuId = ecuId;
entity.channelBeanList = Lists.newArrayList(data._2()._1());
entity.brandBeanList = optional.isPresent() ? Lists.newArrayList(optional.get()) : new ArrayList<>();
return entity;
});
return rdd;
}
protected static TrdEcuSalesBeanBase brandLabelToSalesLabel(TrdEcuBrandBean brandLabelBean) {
TrdEcuSalesBeanBase salesLabelBean = new TrdEcuSalesBeanBase();
salesLabelBean.setEnt_id(brandLabelBean.getEnt_id());
salesLabelBean.setMbr_area_id(brandLabelBean.getMbr_area_id());
salesLabelBean.setEcu_id(brandLabelBean.getEcu_id());
...@@ -69,7 +133,43 @@ public abstract class AbstractTagConsumFilter implements BaseTagFilter {
salesLabelBean.setPay_amt(brandLabelBean.getPay_amt());
salesLabelBean.setTotal_amt(brandLabelBean.getTotal_amt());
salesLabelBean.setSleep_days(brandLabelBean.getSleep_days());
return salesLabelBean;
}
protected static void handleValueCompare(Set<String> groupIds, TagConsumeDoubleRequest consumeRequest, String tagGroupId, double cusSinglePiece) {
switch (consumeRequest.getNumberType()) {
case gt:
if (cusSinglePiece > consumeRequest.getBeginNum()) {
groupIds.add(tagGroupId);
}
break;
case gte:
if (cusSinglePiece >= consumeRequest.getBeginNum()) {
groupIds.add(tagGroupId);
}
break;
case lt:
if (cusSinglePiece < consumeRequest.getEndNum()) {
groupIds.add(tagGroupId);
}
break;
case lte:
if (cusSinglePiece <= consumeRequest.getEndNum()) {
groupIds.add(tagGroupId);
}
break;
case eq:
if (cusSinglePiece == consumeRequest.getEqualNum()) {
groupIds.add(tagGroupId);
}
break;
case between:
if (cusSinglePiece >= consumeRequest.getBeginNum()
&& cusSinglePiece <= consumeRequest.getEndNum()) {
groupIds.add(tagGroupId);
}
default:
break;
}
}
}
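Since handleValueCompare centralizes the NumberType branching reused by the filters below, here is a self-contained sketch of the same comparison semantics; the enum and helper are illustrative stand-ins, not part of this commit:

public class ValueCompareSketch {
    enum NumberType { gt, gte, lt, lte, eq, between }

    // Mirrors the branching in handleValueCompare with plain doubles.
    static boolean matches(NumberType type, double value, double begin, double end, double equal) {
        switch (type) {
            case gt:      return value > begin;
            case gte:     return value >= begin;
            case lt:      return value < end;
            case lte:     return value <= end;
            case eq:      return value == equal;
            case between: return value >= begin && value <= end;
            default:      return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(matches(NumberType.between, 3.0, 2.0, 5.0, 0.0)); // true
        System.out.println(matches(NumberType.gt, 1.5, 2.0, 0.0, 0.0));      // false
    }
}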
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.AbstractFilterRequestTime;
...@@ -8,6 +9,7 @@ import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import org.apache.spark.sql.Row;
import scala.Tuple2;
import java.util.ArrayList;
...@@ -25,6 +27,13 @@ public abstract class AbstractTagConsumRecordFilter implements BaseTagFilter {
DataSourceHive dataSourceHiveOrder = new DataSourceHive(ConstantUtil.DWD_GIC_TRD_VIRTUAL_WDORDER_D);
DataSourceHive dataSourceHiveOrderItem = new DataSourceHive(ConstantUtil.DWD_GIC_TRD_VIRTUAL_ORDER_ITEM_D);
protected void getOrderRdds(int enterpriseId) {
JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
JavaRDD<Row> virtualOrderItemRdd = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId).select("virtual_order_id", "ent_brand_id").javaRDD();
}
protected static JavaRDD<TrdVirtualOrderBean> statisticsTypeHandle(JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderRdd, AbstractFilterRequest request) {
JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = orderRdd.mapPartitions(data -> {
List<TrdVirtualOrderBean> result = new ArrayList();
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.request.AbstractFilterRequest;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import java.util.List;
...@@ -16,4 +18,6 @@ public interface BaseTagFilter {
List<DataSourceEntity> necessarySourceList();
JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request);
JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList);
}
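The new batch overload pairs each member (ecu_id) with the tag group IDs whose conditions that member satisfies; the implementations below join the matching group IDs with a space and drop members that match nothing. A tiny illustration of that output shape, with made-up values:

import scala.Tuple2;

public class FilterResultShapeSketch {
    public static void main(String[] args) {
        // Hypothetical element of the returned JavaPairRDD<Long, String>.
        Tuple2<Long, String> sample = Tuple2.apply(10086L, "tagGroupA tagGroupB");
        System.out.println(sample._1() + " -> " + sample._2()); // 10086 -> tagGroupA tagGroupB
    }
}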
...@@ -2,35 +2,42 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.bean.TrdEcuBrandBean;
import com.gic.spark.entity.bean.TrdEcuSalesBeanBase;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeDoubleRequest;
import com.gic.spark.util.CommonUtil;
import com.google.common.base.Joiner;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @description: attachment rate (items per order)
* @author: wangxk
* @date: 2020/5/7
*/
public class TagAssociatedPurchaseRateFilter extends AbstractTagConsumFilter {
private static TagAssociatedPurchaseRateFilter instance;
public static TagAssociatedPurchaseRateFilter getInstance() {
if (null == instance) {
instance = new TagAssociatedPurchaseRateFilter();
}
return instance;
}
private TagAssociatedPurchaseRateFilter() {
}
@Override
public List<DataSourceEntity> necessarySourceList() {
List<DataSourceEntity> result = new ArrayList();
...@@ -41,64 +48,100 @@ public class TagAssociatedPurchaseRateFilter extends AbstractTagConsumFilter{
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) request;
JavaRDD<TrdEcuSalesBeanBase> salesLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesBeanBase.class).javaRDD();
JavaRDD<TrdEcuBrandBean> brandLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandBean.class).javaRDD();
JavaRDD<Tuple2<TrdEcuSalesBeanBase, Optional<Iterable<TrdEcuBrandBean>>>> labelRDD = salesLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.leftOuterJoin(brandLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data)).groupByKey())
.map(data -> data._2());
JavaRDD<TrdEcuSalesBeanBase> consumeRDD = statisticsTypeHandle(labelRDD, consumeRequest);
JavaRDD<Long> ecuRdd = consumeRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.reduceByKey((x, y) -> {
x.setSeff_goods_num(x.getSeff_goods_num() + y.getSeff_goods_num());
x.setSeff_order_cnt(x.getSeff_order_cnt() + y.getSeff_order_cnt());
return x;
})
.mapPartitions(data -> {
List<Long> result = new ArrayList();
while (data.hasNext()) {
Tuple2<Long, TrdEcuSalesBeanBase> tp2 = data.next();
double jointRate = CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num()) / CommonUtil.isEmptyInteger2int(tp2._2().getSeff_order_cnt());
switch (consumeRequest.getNumberType()) {
case gt:
if (jointRate > consumeRequest.getBeginNum()) {
result.add(tp2._1());
}
break;
case gte:
if (jointRate >= consumeRequest.getBeginNum()) {
result.add(tp2._1());
}
break;
case lt:
if (jointRate < consumeRequest.getEndNum()) {
result.add(tp2._1());
}
break;
case lte:
if (jointRate <= consumeRequest.getEndNum()) {
result.add(tp2._1());
}
break;
case eq:
if (jointRate == consumeRequest.getEqualNum()) {
result.add(tp2._1());
}
break;
case between:
if (jointRate >= consumeRequest.getBeginNum()
&& jointRate <= consumeRequest.getEndNum()) {
result.add(tp2._1());
}
default:
break;
}
}
return result.iterator();
});
return ecuRdd;
}
@Override
public JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList) {
JavaRDD<ConsumeStatisticEntity> consumeEntity = getConsumeEntity(enterpriseId);
JavaPairRDD<Long, String> resultRdd = consumeEntity.mapToPair(data -> {
Set<String> groupIds = new HashSet<>();
for (FilterProcessEntity entity : processEntityList) {
TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) entity.getRequest();
// filter the sales rows down to the request's statistics dimension
List<TrdEcuSalesBeanBase> salesBeanList = filterSalesBean(data, consumeRequest);
if (salesBeanList.size() == 0) {
continue;
}
int goodsNum = 0;
int orderCount = 0;
for (TrdEcuSalesBeanBase beanBase : salesBeanList) {
goodsNum += beanBase.getSeff_goods_num();
orderCount += beanBase.getSeff_order_cnt();
}
double jointRate = goodsNum / (double) orderCount;
String tagGroupId = entity.getTagGroupId();
handleValueCompare(groupIds, consumeRequest, tagGroupId, jointRate);
}
return Tuple2.apply(data.ecuId, groupIds.size() > 0 ? Joiner.on(" ").join(groupIds) : null);
}).filter(data -> data._2() != null);
return resultRdd;
}
}
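A quick worked example of the attachment (joint purchase) rate computed above, effective goods divided by effective orders, as a self-contained sketch with made-up numbers:

public class JointRateSketch {
    public static void main(String[] args) {
        int goodsNum = 6;    // summed seff_goods_num over the matching sales rows
        int orderCount = 4;  // summed seff_order_cnt over the same rows
        double jointRate = goodsNum / (double) orderCount; // 1.5 items per order
        // A "between 1 and 2" condition would therefore tag this member.
        boolean matches = jointRate >= 1.0 && jointRate <= 2.0;
        System.out.println(jointRate + " matches=" + matches); // 1.5 matches=true
    }
}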
...@@ -2,17 +2,22 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.bean.TrdEcuBrandBean;
import com.gic.spark.entity.bean.TrdEcuSalesBeanBase;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeDoubleRequest;
import com.gic.spark.util.CommonUtil;
import com.google.common.base.Joiner;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @description: average discount rate
...@@ -45,14 +50,14 @@ public class TagAverageDiscountFactorFilter extends AbstractTagConsumFilter {
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) request;
JavaRDD<TrdEcuSalesBeanBase> salesLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesBeanBase.class).javaRDD();
JavaRDD<TrdEcuBrandBean> brandLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandBean.class).javaRDD();
JavaRDD<Tuple2<TrdEcuSalesBeanBase, Optional<Iterable<TrdEcuBrandBean>>>> labelRDD = salesLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.leftOuterJoin(brandLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data)).groupByKey())
.map(data -> data._2());
JavaRDD<TrdEcuSalesBeanBase> consumeRDD = statisticsTypeHandle(labelRDD, consumeRequest);
int configStatus = CommonUtil.getConfigStatus(enterpriseId);
JavaRDD<Long> ecuRdd = consumeRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
...@@ -65,7 +70,7 @@ public class TagAverageDiscountFactorFilter extends AbstractTagConsumFilter {
.mapPartitions(data -> {
List<Long> result = new ArrayList();
while (data.hasNext()) {
Tuple2<Long, TrdEcuSalesBeanBase> tp2 = data.next();
double avgDiscountRate = 1 == configStatus ? CommonUtil.isEmptyDouble2double(tp2._2().getPay_amt()) / CommonUtil.isEmptyDouble2double(tp2._2().getTotal_amt())
: CommonUtil.isEmptyDouble2double(tp2._2().getReceive_amt()) / CommonUtil.isEmptyDouble2double(tp2._2().getTotal_amt());
switch (consumeRequest.getNumberType()) {
...@@ -107,4 +112,39 @@ public class TagAverageDiscountFactorFilter extends AbstractTagConsumFilter {
});
return ecuRdd;
}
@Override
public JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList) {
JavaRDD<ConsumeStatisticEntity> entityJavaRDD = getConsumeEntity(enterpriseId);
int configStatus = CommonUtil.getConfigStatus(enterpriseId);
JavaPairRDD<Long, String> result = entityJavaRDD.mapToPair(data -> {
Set<String> groupIds = new HashSet<>();
for (FilterProcessEntity entity : processEntityList) {
TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) entity.getRequest();
List<TrdEcuSalesBeanBase> salesBeanList = filterSalesBean(data, entity.getRequest());
if (salesBeanList.size() == 0) {
continue;
}
double receiveAmount = 0;
double payAmount = 0;
double totalAmount = 0;
for (TrdEcuSalesBeanBase beanBase : salesBeanList) {
receiveAmount += beanBase.getReceive_amt();
payAmount += beanBase.getPay_amt();
totalAmount += beanBase.getTotal_amt();
}
double avgDiscountRate = 1 == configStatus ? payAmount / totalAmount
: receiveAmount / totalAmount;
String tagGroupId = entity.getTagGroupId();
handleValueCompare(groupIds, consumeRequest, tagGroupId, avgDiscountRate);
}
return Tuple2.apply(data.ecuId, groupIds.size() == 0 ? null : Joiner.on(" ").join(groupIds));
}).filter(data -> data._2() != null);
return result;
}
}
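A worked example of the average discount rate used above: depending on configStatus, the numerator is either the actually paid amount or the receivable amount, and the denominator is the tag-price total. A self-contained sketch with made-up numbers:

public class AvgDiscountRateSketch {
    public static void main(String[] args) {
        double payAmount = 80.0;      // actually paid
        double receiveAmount = 90.0;  // receivable
        double totalAmount = 100.0;   // tag-price total
        int configStatus = 1;         // 1 -> use paid amount, otherwise receivable
        double avgDiscountRate = 1 == configStatus
                ? payAmount / totalAmount
                : receiveAmount / totalAmount;
        System.out.println(avgDiscountRate); // 0.8
    }
}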
...@@ -2,6 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
...@@ -48,10 +49,10 @@ public class TagConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
Dataset<Row> orderItemDS = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId);
JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderAndItemRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
.leftOuterJoin(orderItemDS.select("virtual_order_id", "ent_brand_id").javaRDD()
.mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1)))
.groupByKey())
.map(data -> data._2());
...@@ -61,7 +62,7 @@ public class TagConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
.filter(data -> checkTime(commodityRequest, DateUtil.strToDate(data.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()))
.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data.getEcu_id()));
JavaPairRDD<Long, Long> orderItemRDD = MysqlRddManager.getPojoFromDataset(orderItemDS, TrdVirtualOrderItemBean.class).javaRDD()
.filter(data -> {
if (StringUtils.isNotEmpty(data.getSku_code())
&& commodityRequest.getSkuCodeList().contains(data.getSku_code())) {
...@@ -78,4 +79,10 @@ public class TagConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
return ecuRdd;
}
@Override
public JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList) {
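// Batch (multi-tag-group) filtering is not implemented for this filter yet; callers currently receive null.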
return null;
}
}
...@@ -7,7 +7,6 @@ import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeTimeRequest;
import com.gic.spark.util.DateUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import org.apache.spark.sql.Column;
...@@ -22,18 +21,19 @@ import java.util.List;
* @author: wangxk
* @date: 2020/8/10
*/
public class TagConsumeTimeFilter extends AbstractTagConsumRecordFilter {
private static TagConsumeTimeFilter instance;
public static TagConsumeTimeFilter getInstance() {
if (null == instance) {
instance = new TagConsumeTimeFilter();
}
return instance;
}
private TagConsumeTimeFilter() {
}
@Override
public List<DataSourceEntity> necessarySourceList() {
...@@ -45,22 +45,22 @@ public class TagConsumeTimeFilter extends AbstractTagConsumRecordFilter{
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeTimeRequest consumeTimeRequest = (TagConsumeTimeRequest) request;
JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId)
.filter(new Column("is_eff_order").equalTo(1)), TrdVirtualOrderBean.class).javaRDD();
JavaRDD<Row> virtualOrderItemRdd = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId).select("virtual_order_id", "ent_brand_id").javaRDD();
JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
.leftOuterJoin(virtualOrderItemRdd.mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1))).groupByKey())
.map(data -> data._2());
consumeRecordRDD = statisticsTypeHandle(orderRdd, consumeTimeRequest);
JavaRDD<Long> ecuRdd = consumeRecordRDD.filter(data -> StringUtils.isNotEmpty(data.getReceipts_time()))
.mapToPair(data -> Tuple2.apply(data.getEcu_id(), DateUtil.strToDate(data.getReceipts_time(), DateUtil.FORMAT_DATETIME_19)))
.filter(data -> checkTime(consumeTimeRequest, data._2().getTime()))
.reduceByKey((x, y) -> x)
.map(data -> data._1());
return ecuRdd;
}
}
...@@ -25,13 +25,16 @@ import java.util.List;
public class TagConsumeTotalFilter extends AbstractTagConsumRecordFilter {
private static TagConsumeTotalFilter instance;
public static TagConsumeTotalFilter getInstance() {
if (null == instance) {
instance = new TagConsumeTotalFilter();
}
return instance;
}
private TagConsumeTotalFilter() {
}
@Override
public List<DataSourceEntity> necessarySourceList() {
...@@ -43,77 +46,79 @@ public class TagConsumeTotalFilter extends AbstractTagConsumRecordFilter {
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeAmountRequest consumeAmountRequest = (TagConsumeAmountRequest) request;
JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
JavaRDD<Row> virtualOrderItemRdd = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId).select("virtual_order_id", "ent_brand_id").javaRDD();
JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
.leftOuterJoin(virtualOrderItemRdd.mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1))).groupByKey())
.map(data -> data._2());
consumeRecordRDD = statisticsTypeHandle(orderRdd, consumeAmountRequest);
int configStatus = CommonUtil.getConfigStatus(enterpriseId);
JavaRDD<Long> ecuRdd = consumeRecordRDD.filter(data -> {
boolean result = false;
if (StringUtils.isNotEmpty(data.getReceipts_time())) {
Date receiptsTime = DateUtil.strToDate(data.getReceipts_time(), DateUtil.FORMAT_DATE_10);
switch (consumeAmountRequest.getTimeRangeType()) {
case FIXATION:
if (receiptsTime.getTime() >= consumeAmountRequest.getBeginTime().getTime()
&& receiptsTime.getTime() <= consumeAmountRequest.getEndTime().getTime())
result = true;
break;
case LATELY:
if (receiptsTime.getTime() > DateUtil.addNumForDay(new Date(), -consumeAmountRequest.getTimeNum()).getTime()) {
result = true;
}
break;
default:
break;
}
}
return result;
}).mapToPair(data -> Tuple2.apply(data.getEcu_id(), configStatus == 1 ? data.getPaid_amt() : data.getPay_amt()))
.reduceByKey((x, y) -> x + y)
.filter(data -> {
boolean result = false;
switch (consumeAmountRequest.getNumberType()) {
case between:
if (data._2() >= consumeAmountRequest.getBeginNum()
&& data._2() <= consumeAmountRequest.getEndNum()) {
result = true;
}
break;
case lt:
if (data._2() < consumeAmountRequest.getEndNum()) {
result = true;
}
break;
case gt:
if (data._2() > consumeAmountRequest.getBeginNum()) {
result = true;
}
break;
case eq:
if (data._2() == consumeAmountRequest.getEqualNum()) {
result = true;
}
break;
case lte:
if (data._2() <= consumeAmountRequest.getEndNum()) {
result = true;
}
break;
case gte:
if (data._2() >= consumeAmountRequest.getBeginNum()) {
result = true;
}
break;
default:
break;
}
return result;
}).map(data -> data._1());
return ecuRdd;
}
......
...@@ -2,30 +2,35 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.bean.TrdEcuBrandBean;
import com.gic.spark.entity.bean.TrdEcuSalesBeanBase;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeDoubleRequest;
import com.gic.spark.entity.request.TagConsumeRequest;
import com.google.common.base.Joiner;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @description: consumption dormancy days
* @author: wangxk
* @date: 2020/5/8
*/
public class TagConsumptionSleepDaysFilter extends AbstractTagConsumFilter {
private static TagConsumptionSleepDaysFilter instance;
public static TagConsumptionSleepDaysFilter getInstance() {
if (null == instance) {
instance = new TagConsumptionSleepDaysFilter();
}
return instance;
}
...@@ -40,58 +45,87 @@ public class TagConsumptionSleepDaysFilter extends AbstractTagConsumFilter{
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeRequest consumeRequest = (TagConsumeRequest) request;
JavaRDD<TrdEcuSalesBeanBase> salesLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesBeanBase.class).javaRDD();
JavaRDD<TrdEcuBrandBean> brandLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandBean.class).javaRDD();
JavaRDD<Tuple2<TrdEcuSalesBeanBase, Optional<Iterable<TrdEcuBrandBean>>>> labelRDD = salesLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.leftOuterJoin(brandLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data)).groupByKey())
.map(data -> data._2());
JavaRDD<TrdEcuSalesBeanBase> consumeRDD = statisticsTypeHandle(labelRDD, consumeRequest);
JavaRDD<Long> ecuRdd = consumeRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data.getSleep_days()))
.mapPartitionsToPair(data -> {
List<Tuple2<Long, Integer>> result = new ArrayList();
while (data.hasNext()) {
Tuple2<Long, Integer> tp2 = data.next();
switch (consumeRequest.getNumberType()) {
case gt:
if (tp2._2() > consumeRequest.getBeginNum()) {
result.add(tp2);
}
break;
case gte:
if (tp2._2() >= consumeRequest.getBeginNum()) {
result.add(tp2);
}
break;
case lt:
if (tp2._2() < consumeRequest.getEndNum()) {
result.add(tp2);
}
break;
case lte:
if (tp2._2() <= consumeRequest.getEndNum()) {
result.add(tp2);
}
break;
case eq:
if (tp2._2() == consumeRequest.getEqualNum()) {
result.add(tp2);
}
break;
case between:
if (tp2._2() >= consumeRequest.getBeginNum()
&& tp2._2() <= consumeRequest.getEndNum()) {
result.add(tp2);
}
default:
break;
}
}
return result.iterator();
})
.reduceByKey((x, y) -> x)
.map(Tuple2::_1);
return ecuRdd;
}
@Override
public JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList) {
JavaRDD<ConsumeStatisticEntity> entityJavaRDD = getConsumeEntity(enterpriseId);
JavaPairRDD<Long, String> rdd = entityJavaRDD.mapToPair(data -> {
Set<String> groupIds = new HashSet<>();
for (FilterProcessEntity entity : processEntityList) {
TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) entity.getRequest();
List<TrdEcuSalesBeanBase> salesBeanList = filterSalesBean(data, entity.getRequest());
if (salesBeanList.size() == 0) {
continue;
}
String tagGroupId = entity.getTagGroupId();
for (TrdEcuSalesBeanBase beanBase : salesBeanList) {
Integer sleepDays = beanBase.getSleep_days();
handleValueCompare(groupIds, consumeRequest, tagGroupId, sleepDays);
}
}
return Tuple2.apply(data.ecuId, groupIds.size() == 0 ? null : Joiner.on(" ").join(groupIds));
}).filter(data -> data._2() != null);
return rdd;
}
}
...@@ -2,32 +2,41 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.bean.TrdEcuBrandBean;
import com.gic.spark.entity.bean.TrdEcuSalesBeanBase;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeDoubleRequest;
import com.gic.spark.entity.request.TagConsumeRequest;
import com.google.common.base.Joiner;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @description: consumption count
* @author: wangxk
* @date: 2020/5/6
*/
public class TagConsumptionTimeFilter extends AbstractTagConsumFilter {
private static TagConsumptionTimeFilter instance;
public static TagConsumptionTimeFilter getInstance() {
if (null == instance) {
instance = new TagConsumptionTimeFilter();
}
return instance;
}
private TagConsumptionTimeFilter() {
}
@Override
public List<DataSourceEntity> necessarySourceList() {
List<DataSourceEntity> result = new ArrayList();
...@@ -38,56 +47,83 @@ public class TagConsumptionTimeFilter extends AbstractTagConsumFilter{
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeRequest consumeRequest = (TagConsumeRequest) request;
JavaRDD<TrdEcuSalesBeanBase> salesLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesBeanBase.class).javaRDD();
JavaRDD<TrdEcuBrandBean> brandLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandBean.class).javaRDD();
JavaRDD<Tuple2<TrdEcuSalesBeanBase, Optional<Iterable<TrdEcuBrandBean>>>> labelRDD = salesLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.leftOuterJoin(brandLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data)).groupByKey())
.map(data -> data._2());
JavaRDD<TrdEcuSalesBeanBase> consumeRDD = statisticsTypeHandle(labelRDD, consumeRequest);
JavaRDD<Long> ecuRdd = consumeRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data.getOrder_times())).reduceByKey(Integer::sum)
.mapPartitions(data -> {
List<Long> result = new ArrayList();
while (data.hasNext()) {
Tuple2<Long, Integer> tp2 = data.next();
switch (consumeRequest.getNumberType()) {
case gt:
if (tp2._2() > consumeRequest.getBeginNum()) {
result.add(tp2._1());
}
break;
case gte:
if (tp2._2() >= consumeRequest.getBeginNum()) {
result.add(tp2._1());
}
break;
case lt:
if (tp2._2() < consumeRequest.getEndNum()) {
result.add(tp2._1());
}
break;
case lte:
if (tp2._2() <= consumeRequest.getEndNum()) {
result.add(tp2._1());
}
break;
case eq:
if (tp2._2().intValue() == consumeRequest.getEqualNum()) {
result.add(tp2._1()); result.add(tp2._1());
} }
break; break;
case between: case between:
if(tp2._2()>=consumeRequest.getBeginNum() if (tp2._2() >= consumeRequest.getBeginNum()
&&tp2._2()<=consumeRequest.getEndNum()){ && tp2._2() <= consumeRequest.getEndNum()) {
result.add(tp2._1()); result.add(tp2._1());
} }
default:break; default:
break;
} }
} }
return result.iterator(); return result.iterator();
}); });
return ecuRdd; return ecuRdd;
} }
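/**
 * Multi-group variant: sums order_times over the member's filtered sales records
 * and compares the total with each tag group's threshold via handleValueCompare,
 * emitting (ecuId, space-separated tag group ids) for matching members only.
 */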
@Override
public JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList) {
JavaRDD<ConsumeStatisticEntity> entityJavaRDD = getConsumeEntity(enterpriseId);
return entityJavaRDD.mapToPair(data -> {
Set<String> groupIds = new HashSet<>();
for (FilterProcessEntity entity : processEntityList) {
TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) entity.getRequest();
List<TrdEcuSalesBeanBase> salesBeanList = filterSalesBean(data, entity.getRequest());
if (salesBeanList.size() == 0) {
continue;
}
String tagGroupId = entity.getTagGroupId();
int totalTime = 0;
for (TrdEcuSalesBeanBase beanBase : salesBeanList) {
totalTime += beanBase.getOrder_times();
}
handleValueCompare(groupIds, consumeRequest, tagGroupId, totalTime);
}
return Tuple2.apply(data.ecuId, groupIds.size() == 0 ? null : Joiner.on(" ").join(groupIds));
}).filter(data -> data._2() != null);
}
} }
package com.gic.spark.filter; package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity; import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager; import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean; import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean; import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest; import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeCommodityRequest; import com.gic.spark.entity.request.TagConsumeCommodityRequest;
import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil; import com.gic.spark.util.DateUtil;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaPairRDD;
...@@ -25,14 +23,14 @@ import java.util.List; ...@@ -25,14 +23,14 @@ import java.util.List;
* @author: wangxk * @author: wangxk
* @date: 2020/8/12 * @date: 2020/8/12
*/ */
public class TagFirstConsumeCommodityFilter extends AbstractTagConsumRecordFilter{ public class TagFirstConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
private static TagFirstConsumeCommodityFilter instance; private static TagFirstConsumeCommodityFilter instance;
public static TagFirstConsumeCommodityFilter getInstance() { public static TagFirstConsumeCommodityFilter getInstance() {
if(null==instance){ if (null == instance) {
instance=new TagFirstConsumeCommodityFilter(); instance = new TagFirstConsumeCommodityFilter();
} }
return instance; return instance;
} }
...@@ -47,44 +45,46 @@ public class TagFirstConsumeCommodityFilter extends AbstractTagConsumRecordFilte ...@@ -47,44 +45,46 @@ public class TagFirstConsumeCommodityFilter extends AbstractTagConsumRecordFilte
@Override @Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) { public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeCommodityRequest commodityRequest=(TagConsumeCommodityRequest)request; TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId),TrdVirtualOrderBean.class).javaRDD(); JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
Dataset<Row> OrderItemDS= dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId); Dataset<Row> orderItemDS = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId);
JavaRDD<Tuple2<TrdVirtualOrderBean,Optional<Iterable<String>>>>orderAndItemRdd=consumeRecordRDD.mapToPair(data->Tuple2.apply(data.getVirtual_id(),data)) JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderAndItemRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
.leftOuterJoin(OrderItemDS.select("virtual_order_id","ent_brand_id").javaRDD() .leftOuterJoin(orderItemDS.select("virtual_order_id", "ent_brand_id").javaRDD()
.mapToPair(row->Tuple2.apply(row.getLong(0),row.getString(1))) .mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1)))
.groupByKey()) .groupByKey())
.map(data->data._2()); .map(data -> data._2());
consumeRecordRDD=statisticsTypeHandle(orderAndItemRdd,commodityRequest); consumeRecordRDD = statisticsTypeHandle(orderAndItemRdd, commodityRequest);
JavaPairRDD<Long,Long>orderRdd= consumeRecordRDD.filter(data-> StringUtils.isNotEmpty(data.getReceipts_time()))
.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data)) JavaPairRDD<Long, Long> orderRdd = consumeRecordRDD.filter(data -> StringUtils.isNotEmpty(data.getReceipts_time()))
.reduceByKey((x,y)->{ .mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
if(DateUtil.strToDate(x.getReceipts_time(),DateUtil.FORMAT_DATETIME_19).getTime() .reduceByKey((x, y) -> {
<DateUtil.strToDate(y.getReceipts_time(),DateUtil.FORMAT_DATETIME_19).getTime()){ if (DateUtil.strToDate(x.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()
< DateUtil.strToDate(y.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()) {
return x; return x;
}else{ } else {
return y; return y;
} }
}) })
.mapToPair(data->Tuple2.apply(data._2().getVirtual_id(),data._1())); .mapToPair(data -> Tuple2.apply(data._2().getVirtual_id(), data._1()));
JavaPairRDD<Long,Long> orderItemRDD=MysqlRddManager.getPojoFromDataset(OrderItemDS,TrdVirtualOrderItemBean.class).javaRDD()
.filter(data->{ JavaPairRDD<Long, Long> orderItemRDD = MysqlRddManager.getPojoFromDataset(orderItemDS, TrdVirtualOrderItemBean.class).javaRDD()
if(StringUtils.isNotEmpty(data.getSku_code()) .filter(data -> {
&&commodityRequest.getSkuCodeList().contains(data.getSku_code())){ if (StringUtils.isNotEmpty(data.getSku_code())
&& commodityRequest.getSkuCodeList().contains(data.getSku_code())) {
return true; return true;
} }
return false; return false;
}).mapToPair(data->Tuple2.apply(data.getVirtual_order_id(),data.getVirtual_order_id())) }).mapToPair(data -> Tuple2.apply(data.getVirtual_order_id(), data.getVirtual_order_id()))
.reduceByKey((x,y)->x); .reduceByKey((x, y) -> x);
JavaRDD<Long>ecuRdd=orderRdd.leftOuterJoin(orderItemRDD) JavaRDD<Long> ecuRdd = orderRdd.leftOuterJoin(orderItemRDD)
.filter(data->data._2()._2().isPresent()) .filter(data -> data._2()._2().isPresent())
.map(data->data._2()._1()).distinct(); .map(data -> data._2()._1()).distinct();
return ecuRdd; return ecuRdd;
} }
......
...@@ -5,7 +5,6 @@ import com.gic.spark.datasource.mysql.MysqlRddManager; ...@@ -5,7 +5,6 @@ import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean; import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest; import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest; import com.gic.spark.entity.request.TagConsumeStoreRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional; import org.apache.spark.api.java.Optional;
import org.apache.spark.sql.Row; import org.apache.spark.sql.Row;
...@@ -19,18 +18,19 @@ import java.util.List; ...@@ -19,18 +18,19 @@ import java.util.List;
* @author: wangxk * @author: wangxk
* @date: 2020/8/11 * @date: 2020/8/11
*/ */
public class TagHistoryOfflineConsumptionStoreFilter extends AbstractTagConsumRecordFilter{ public class TagHistoryOfflineConsumptionStoreFilter extends AbstractTagConsumRecordFilter {
private static TagHistoryOfflineConsumptionStoreFilter instance; private static TagHistoryOfflineConsumptionStoreFilter instance;
public static TagHistoryOfflineConsumptionStoreFilter getInstance() { public static TagHistoryOfflineConsumptionStoreFilter getInstance() {
if(null==instance){ if (null == instance) {
instance=new TagHistoryOfflineConsumptionStoreFilter(); instance = new TagHistoryOfflineConsumptionStoreFilter();
} }
return instance; return instance;
} }
private TagHistoryOfflineConsumptionStoreFilter(){} private TagHistoryOfflineConsumptionStoreFilter() {
}
@Override @Override
public List<DataSourceEntity> necessarySourceList() { public List<DataSourceEntity> necessarySourceList() {
...@@ -42,22 +42,22 @@ public class TagHistoryOfflineConsumptionStoreFilter extends AbstractTagConsumRe ...@@ -42,22 +42,22 @@ public class TagHistoryOfflineConsumptionStoreFilter extends AbstractTagConsumRe
@Override @Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) { public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeStoreRequest storeRequest=(TagConsumeStoreRequest)request; TagConsumeStoreRequest storeRequest = (TagConsumeStoreRequest) request;
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId),TrdVirtualOrderBean.class).javaRDD(); JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
JavaRDD<Row>virtualOrderItemRdd=dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId).select("virtual_order_id","ent_brand_id").toJavaRDD(); JavaRDD<Row> virtualOrderItemRdd = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId).select("virtual_order_id", "ent_brand_id").toJavaRDD();
JavaRDD<Tuple2<TrdVirtualOrderBean,Optional<Iterable<String>>>>orderRdd=consumeRecordRDD.mapToPair(data->Tuple2.apply(data.getVirtual_id(),data)) JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
.leftOuterJoin(virtualOrderItemRdd.mapToPair(row->Tuple2.apply(row.getLong(0),row.getString(1))).groupByKey()) .leftOuterJoin(virtualOrderItemRdd.mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1))).groupByKey())
.map(data->data._2()); .map(data -> data._2());
consumeRecordRDD=statisticsTypeHandle(orderRdd,storeRequest); consumeRecordRDD = statisticsTypeHandle(orderRdd, storeRequest);
JavaRDD<Long>ecuRdd=consumeRecordRDD.filter(data->data.getOrder_channel_code()==1 JavaRDD<Long> ecuRdd = consumeRecordRDD.filter(data -> data.getOrder_channel_code() == 1
&& null!=data.getStore_info_id()) && null != data.getStore_info_id())
.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data.getStore_info_id())) .mapToPair(data -> Tuple2.apply(data.getEcu_id(), data.getStore_info_id()))
.filter(data->storeRequest.getStoreList().contains(String.valueOf(data._2()))) .filter(data -> storeRequest.getStoreList().contains(String.valueOf(data._2())))
.reduceByKey((x,y)->x) .reduceByKey((x, y) -> x)
.map(data->data._1()); .map(data -> data._1());
return ecuRdd; return ecuRdd;
} }
......
package com.gic.spark.filter; package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity; import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager; import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean; import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean; import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest; import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeCommodityRequest; import com.gic.spark.entity.request.TagConsumeCommodityRequest;
import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil; import com.gic.spark.util.DateUtil;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaPairRDD;
...@@ -25,14 +23,14 @@ import java.util.List; ...@@ -25,14 +23,14 @@ import java.util.List;
* @author: wangxk * @author: wangxk
* @date: 2020/8/12 * @date: 2020/8/12
*/ */
public class TagLatelyConsumeCommodityFilter extends AbstractTagConsumRecordFilter{ public class TagLatelyConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
private static TagLatelyConsumeCommodityFilter instance; private static TagLatelyConsumeCommodityFilter instance;
public static TagLatelyConsumeCommodityFilter getInstance() { public static TagLatelyConsumeCommodityFilter getInstance() {
if(null==instance){ if (null == instance) {
instance=new TagLatelyConsumeCommodityFilter(); instance = new TagLatelyConsumeCommodityFilter();
} }
return instance; return instance;
} }
...@@ -47,44 +45,44 @@ public class TagLatelyConsumeCommodityFilter extends AbstractTagConsumRecordFilt ...@@ -47,44 +45,44 @@ public class TagLatelyConsumeCommodityFilter extends AbstractTagConsumRecordFilt
@Override @Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) { public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeCommodityRequest commodityRequest=(TagConsumeCommodityRequest)request; TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
JavaRDD<TrdVirtualOrderBean>consumeRecordRDD= MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId),TrdVirtualOrderBean.class).javaRDD(); JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
Dataset<Row> OrderItemDS= dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId); Dataset<Row> orderItemDS = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId);
JavaRDD<Tuple2<TrdVirtualOrderBean,Optional<Iterable<String>>>>orderAndItemRdd=consumeRecordRDD.mapToPair(data->Tuple2.apply(data.getVirtual_id(),data)) JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderAndItemRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
.leftOuterJoin(OrderItemDS.select("virtual_order_id","ent_brand_id").javaRDD() .leftOuterJoin(orderItemDS.select("virtual_order_id", "ent_brand_id").javaRDD()
.mapToPair(row->Tuple2.apply(row.getLong(0),row.getString(1))) .mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1)))
.groupByKey()) .groupByKey())
.map(data->data._2()); .map(data -> data._2());
consumeRecordRDD=statisticsTypeHandle(orderAndItemRdd,commodityRequest); consumeRecordRDD = statisticsTypeHandle(orderAndItemRdd, commodityRequest);
JavaPairRDD<Long,Long>orderRdd= consumeRecordRDD.filter(data-> StringUtils.isNotEmpty(data.getReceipts_time())) JavaPairRDD<Long, Long> orderRdd = consumeRecordRDD.filter(data -> StringUtils.isNotEmpty(data.getReceipts_time()))
.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data)) .mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.reduceByKey((x,y)->{ .reduceByKey((x, y) -> {
if(DateUtil.strToDate(x.getReceipts_time(),DateUtil.FORMAT_DATETIME_19).getTime() if (DateUtil.strToDate(x.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()
>DateUtil.strToDate(y.getReceipts_time(),DateUtil.FORMAT_DATETIME_19).getTime()){ > DateUtil.strToDate(y.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()) {
return x; return x;
}else{ } else {
return y; return y;
} }
}) })
.mapToPair(data->Tuple2.apply(data._2().getVirtual_id(),data._1())); .mapToPair(data -> Tuple2.apply(data._2().getVirtual_id(), data._1()));
JavaPairRDD<Long,Long> orderItemRDD=MysqlRddManager.getPojoFromDataset(OrderItemDS,TrdVirtualOrderItemBean.class).javaRDD() JavaPairRDD<Long, Long> orderItemRDD = MysqlRddManager.getPojoFromDataset(orderItemDS, TrdVirtualOrderItemBean.class).javaRDD()
.filter(data->{ .filter(data -> {
if(StringUtils.isNotEmpty(data.getSku_code()) if (StringUtils.isNotEmpty(data.getSku_code())
&&commodityRequest.getSkuCodeList().contains(data.getSku_code())){ && commodityRequest.getSkuCodeList().contains(data.getSku_code())) {
return true; return true;
} }
return false; return false;
}).mapToPair(data->Tuple2.apply(data.getVirtual_order_id(),data.getVirtual_order_id())) }).mapToPair(data -> Tuple2.apply(data.getVirtual_order_id(), data.getVirtual_order_id()))
.reduceByKey((x,y)->x); .reduceByKey((x, y) -> x);
JavaRDD<Long>ecuRdd=orderRdd.leftOuterJoin(orderItemRDD) JavaRDD<Long> ecuRdd = orderRdd.leftOuterJoin(orderItemRDD)
.filter(data->data._2()._2().isPresent()) .filter(data -> data._2()._2().isPresent())
.map(data->data._2()._1()).distinct(); .map(data -> data._2()._1()).distinct();
return ecuRdd; return ecuRdd;
} }
......
...@@ -2,35 +2,43 @@ package com.gic.spark.filter; ...@@ -2,35 +2,43 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity; import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager; import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdEcuBrandLabelBean; import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.bean.TrdEcuSalesLabelBean; import com.gic.spark.entity.bean.TrdEcuBrandBean;
import com.gic.spark.entity.bean.TrdEcuSalesBeanBase;
import com.gic.spark.entity.request.AbstractFilterRequest; import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeDoubleRequest;
import com.gic.spark.entity.request.TagConsumeRequest; import com.gic.spark.entity.request.TagConsumeRequest;
import com.gic.spark.util.CommonUtil; import com.gic.spark.util.CommonUtil;
import com.google.common.base.Joiner;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional; import org.apache.spark.api.java.Optional;
import scala.Tuple2; import scala.Tuple2;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set;
/** /**
* @description: * @description:
* @author: wangxk * @author: wangxk
* @date: 2020/8/26 * @date: 2020/8/26
*/ */
public class TagUnitPriceFilter extends AbstractTagConsumFilter{ public class TagUnitPriceFilter extends AbstractTagConsumFilter {
private static TagUnitPriceFilter instance; private static TagUnitPriceFilter instance;
public static TagUnitPriceFilter getInstance() { public static TagUnitPriceFilter getInstance() {
if(null==instance){ if (null == instance) {
instance=new TagUnitPriceFilter(); instance = new TagUnitPriceFilter();
} }
return instance; return instance;
} }
private TagUnitPriceFilter(){} private TagUnitPriceFilter() {
}
@Override @Override
public List<DataSourceEntity> necessarySourceList() { public List<DataSourceEntity> necessarySourceList() {
List<DataSourceEntity> result = new ArrayList(); List<DataSourceEntity> result = new ArrayList();
...@@ -41,67 +49,106 @@ public class TagUnitPriceFilter extends AbstractTagConsumFilter{ ...@@ -41,67 +49,106 @@ public class TagUnitPriceFilter extends AbstractTagConsumFilter{
@Override @Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) { public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeRequest consumeRequest=(TagConsumeRequest)request; TagConsumeRequest consumeRequest = (TagConsumeRequest) request;
JavaRDD<TrdEcuSalesLabelBean> salesLabelRDD=MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesLabelBean.class).javaRDD(); JavaRDD<TrdEcuSalesBeanBase> salesLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesBeanBase.class).javaRDD();
JavaRDD<TrdEcuBrandLabelBean> brandLabelRDD=MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandLabelBean.class).javaRDD(); JavaRDD<TrdEcuBrandBean> brandLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandBean.class).javaRDD();
JavaRDD<Tuple2<TrdEcuSalesLabelBean,Optional<Iterable<TrdEcuBrandLabelBean>>>>labelRDD=salesLabelRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data)) JavaRDD<Tuple2<TrdEcuSalesBeanBase, Optional<Iterable<TrdEcuBrandBean>>>> labelRDD = salesLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.leftOuterJoin(brandLabelRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data)).groupByKey()) .leftOuterJoin(brandLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data)).groupByKey())
.map(data->data._2()); .map(data -> data._2());
JavaRDD<TrdEcuSalesLabelBean> consumeRDD=statisticsTypeHandle(labelRDD,consumeRequest); JavaRDD<TrdEcuSalesBeanBase> consumeRDD = statisticsTypeHandle(labelRDD, consumeRequest);
int configStatus= CommonUtil.getConfigStatus(enterpriseId); int configStatus = CommonUtil.getConfigStatus(enterpriseId);
JavaRDD<Long>ecuRdd=consumeRDD.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data)) JavaRDD<Long> ecuRdd = consumeRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.reduceByKey((x,y)->{ .reduceByKey((x, y) -> {
x.setReceive_amt(x.getReceive_amt()+y.getReceive_amt()); x.setReceive_amt(x.getReceive_amt() + y.getReceive_amt());
x.setPay_amt(x.getPay_amt()+y.getPay_amt()); x.setPay_amt(x.getPay_amt() + y.getPay_amt());
x.setOrder_times(x.getOrder_times()+y.getOrder_times()); x.setOrder_times(x.getOrder_times() + y.getOrder_times());
return x; return x;
}) })
.mapPartitions(data->{ .mapPartitions(data -> {
List<Long> result=new ArrayList(); List<Long> result = new ArrayList();
while (data.hasNext()){ while (data.hasNext()) {
Tuple2<Long,TrdEcuSalesLabelBean> tp2=data.next(); Tuple2<Long, TrdEcuSalesBeanBase> tp2 = data.next();
double CusSinglePiece=1==configStatus?CommonUtil.isEmptyDouble2double(tp2._2().getPay_amt())/CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num()) double CusSinglePiece = 1 == configStatus ? CommonUtil.isEmptyDouble2double(tp2._2().getPay_amt()) / CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num())
:CommonUtil.isEmptyDouble2double(tp2._2().getReceive_amt())/CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num()); : CommonUtil.isEmptyDouble2double(tp2._2().getReceive_amt()) / CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num());
switch (consumeRequest.getNumberType()){ switch (consumeRequest.getNumberType()) {
case gt: case gt:
if(CusSinglePiece>consumeRequest.getBeginNum()){ if (CusSinglePiece > consumeRequest.getBeginNum()) {
result.add(tp2._1()); result.add(tp2._1());
} }
break; break;
case gte: case gte:
if(CusSinglePiece>=consumeRequest.getBeginNum()){ if (CusSinglePiece >= consumeRequest.getBeginNum()) {
result.add(tp2._1()); result.add(tp2._1());
} }
break; break;
case lt: case lt:
if(CusSinglePiece<consumeRequest.getEndNum()){ if (CusSinglePiece < consumeRequest.getEndNum()) {
result.add(tp2._1()); result.add(tp2._1());
} }
break; break;
case lte: case lte:
if(CusSinglePiece<=consumeRequest.getEndNum()){ if (CusSinglePiece <= consumeRequest.getEndNum()) {
result.add(tp2._1()); result.add(tp2._1());
} }
break; break;
case eq: case eq:
if(CusSinglePiece==consumeRequest.getEqualNum()){ if (CusSinglePiece == consumeRequest.getEqualNum()) {
result.add(tp2._1()); result.add(tp2._1());
} }
break; break;
case between: case between:
if(CusSinglePiece>=consumeRequest.getBeginNum() if (CusSinglePiece >= consumeRequest.getBeginNum()
&&CusSinglePiece<=consumeRequest.getEndNum()){ && CusSinglePiece <= consumeRequest.getEndNum()) {
result.add(tp2._1()); result.add(tp2._1());
} }
default:break; default:
break;
} }
} }
return result.iterator(); return result.iterator();
}); });
return ecuRdd; return ecuRdd;
} }
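/**
 * Multi-group variant: aggregates pay/receive amounts and the effective goods count
 * per member, derives the average unit price (actual-paid or payable amounts,
 * depending on the enterprise config), and compares it with each tag group's threshold.
 */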
@Override
public JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList) {
JavaRDD<ConsumeStatisticEntity> entityJavaRDD = getConsumeEntity(enterpriseId);
int configStatus = CommonUtil.getConfigStatus(enterpriseId);
JavaPairRDD<Long, String> rdd = entityJavaRDD.mapToPair(data -> {
Set<String> groupIds = new HashSet<>();
for (FilterProcessEntity entity : processEntityList) {
TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) entity.getRequest();
List<TrdEcuSalesBeanBase> salesBeanList = filterSalesBean(data, entity.getRequest());
if (salesBeanList.size() == 0) {
continue;
}
String tagGroupId = entity.getTagGroupId();
double receiveAmount = 0;
double payAmount = 0;
int orderTimes = 0;
for (TrdEcuSalesBeanBase beanBase : salesBeanList) {
receiveAmount += beanBase.getReceive_amt();
payAmount += beanBase.getPay_amt();
orderTimes += beanBase.getSeff_goods_num();
}
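// configStatus == 1 selects actual-paid amounts (pay_amt); otherwise payable amounts (receive_amt) are used.
// NOTE: this assumes the summed seff_goods_num is positive; a zero total would make the division yield Infinity or NaN.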
double cusSinglePiece = 1 == configStatus ? payAmount / orderTimes
: receiveAmount / orderTimes;
handleValueCompare(groupIds, consumeRequest, tagGroupId, cusSinglePiece);
}
return Tuple2.apply(data.ecuId, groupIds.size() == 0 ? null : Joiner.on(" ").join(groupIds));
}).filter(data -> data._2() != null);
return rdd;
}
} }
...@@ -12,6 +12,9 @@ import java.util.List; ...@@ -12,6 +12,9 @@ import java.util.List;
public class TagProcessEntity { public class TagProcessEntity {
int enterpriseId; int enterpriseId;
long tagGroupId; long tagGroupId;
/**
* 1 real-time, 2 non-real-time
*/
Integer realTime; Integer realTime;
int level; int level;
List<TagConditionDTO> tagList; List<TagConditionDTO> tagList;
......
...@@ -57,7 +57,8 @@ public class TagProcessManager { ...@@ -57,7 +57,8 @@ public class TagProcessManager {
private List<SceneCrowdDTO> sceneCrowdDTOList = new ArrayList(); private List<SceneCrowdDTO> sceneCrowdDTOList = new ArrayList();
private MysqlRddManager member4RddManager; private MysqlRddManager member4RddManager;
private MysqlRddManager enterprise4RddManager; private MysqlRddManager enterprise4RddManager;
private DataSourceSharding memberSharding4Datasource; private DataSourceSharding enterpriseUserDatasource;
private DataSourceSharding enterpriseUserRelationDatasource;
private MysqlDatasource member4Datasource = null; private MysqlDatasource member4Datasource = null;
private MysqlDatasource enterprise4Datasource = null; private MysqlDatasource enterprise4Datasource = null;
private boolean isProduction; private boolean isProduction;
...@@ -98,7 +99,8 @@ public class TagProcessManager { ...@@ -98,7 +99,8 @@ public class TagProcessManager {
} }
member4RddManager = member4Datasource.buildRddManager(); member4RddManager = member4Datasource.buildRddManager();
enterprise4RddManager = enterprise4Datasource.buildRddManager(); enterprise4RddManager = enterprise4Datasource.buildRddManager();
memberSharding4Datasource = new DataSourceSharding(AppEnvUtil.MEMBER_SHARDING_4, ConstantUtil.TAB_ENTERPRISE_USER); enterpriseUserDatasource = new DataSourceSharding(AppEnvUtil.MEMBER_SHARDING_4, ConstantUtil.TAB_ENTERPRISE_USER);
enterpriseUserRelationDatasource = new DataSourceSharding(AppEnvUtil.MEMBER_SHARDING_4, ConstantUtil.TAB_ENTERPRISE_USER_RELATION);
List<TabSceneCrowd> sceneCrowdList = member4RddManager.getPojo("tab_scene_crowd", TabSceneCrowd.class, null) List<TabSceneCrowd> sceneCrowdList = member4RddManager.getPojo("tab_scene_crowd", TabSceneCrowd.class, null)
.filter(new Column("delete_flag").equalTo(0)) .filter(new Column("delete_flag").equalTo(0))
...@@ -175,15 +177,13 @@ public class TagProcessManager { ...@@ -175,15 +177,13 @@ public class TagProcessManager {
for (TagConditionDTO conditionDTO : processEntity.tagList) { for (TagConditionDTO conditionDTO : processEntity.tagList) {
if (tagIdToFilterMap.containsKey(conditionDTO.getTagId())) { if (tagIdToFilterMap.containsKey(conditionDTO.getTagId())) {
for (DataSourceEntity sourceEntity : tagIdToFilterMap.get(conditionDTO.getTagId()).necessarySourceList()) { for (DataSourceEntity sourceEntity : tagIdToFilterMap.get(conditionDTO.getTagId()).necessarySourceList()) {
// System.out.println("enterpriseId==>"+enterpriseTagEntry.getKey()); DataSourceManager.getInstance().addSourceEntity(sourceEntity, enterpriseTagEntry.getKey());
// System.out.println("SourceKey==>"+sourceEntity.getSourceKey());
// System.out.println("HiveTableName==>"+sourceEntity.getHiveTableName());
DataSourceManager.getInstance().addSourceEntity(sourceEntity, enterpriseTagEntry.getKey().intValue());
} }
} }
} }
} }
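// every enterprise additionally needs the enterprise user and user relation sharding tables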
DataSourceManager.getInstance().addSourceEntity(memberSharding4Datasource, enterpriseTagEntry.getKey().intValue()); DataSourceManager.getInstance().addSourceEntity(enterpriseUserDatasource, enterpriseTagEntry.getKey());
DataSourceManager.getInstance().addSourceEntity(enterpriseUserRelationDatasource, enterpriseTagEntry.getKey());
} }
if (extractData) { if (extractData) {
...@@ -194,8 +194,8 @@ public class TagProcessManager { ...@@ -194,8 +194,8 @@ public class TagProcessManager {
//process tag data //process tag data
JavaSparkContext jsc = SparkEnvManager.getInstance().getJsc(); JavaSparkContext jsc = SparkEnvManager.getInstance().getJsc();
List<Long> sceneCrowdIdList = new ArrayList();
for (Map.Entry<Integer, List<TagProcessEntity>> enterpriseTagEntry : tagGroupByEnterpriseMap.entrySet()) { for (Map.Entry<Integer, List<TagProcessEntity>> enterpriseTagEntry : tagGroupByEnterpriseMap.entrySet()) {
List<Long> sceneCrowdIdList = new ArrayList();
Integer enterpriseId = enterpriseTagEntry.getKey(); Integer enterpriseId = enterpriseTagEntry.getKey();
String indexName = EsRequestUtil.getESIindexName(enterpriseId, this.isProduction()); String indexName = EsRequestUtil.getESIindexName(enterpriseId, this.isProduction());
...@@ -210,7 +210,6 @@ public class TagProcessManager { ...@@ -210,7 +210,6 @@ public class TagProcessManager {
JavaPairRDD<Long, String> filterRdd = tagFilter.filterValidMember(enterpriseId, filterRequest).mapToPair(data -> Tuple2.apply(data, groupId)); JavaPairRDD<Long, String> filterRdd = tagFilter.filterValidMember(enterpriseId, filterRequest).mapToPair(data -> Tuple2.apply(data, groupId));
System.out.println("filterRdd==>" + filterRdd.count());
if (null == memberGroupRdd) { if (null == memberGroupRdd) {
memberGroupRdd = filterRdd; memberGroupRdd = filterRdd;
} else { } else {
...@@ -222,7 +221,8 @@ public class TagProcessManager { ...@@ -222,7 +221,8 @@ public class TagProcessManager {
} }
if (null != memberGroupRdd) { if (null != memberGroupRdd) {
JavaPairRDD<Long, Long> userRdd = memberSharding4Datasource.getDatasetByEnterpriseId(enterpriseId).select("id").javaRDD() JavaPairRDD<Long, Long> userRdd = enterpriseUserDatasource.getDatasetByEnterpriseId(enterpriseId).select("id", "delete_flag").javaRDD()
.filter(data -> 0 == (Integer) data.getAs("delete_flag"))
.mapToPair(data -> Tuple2.apply((Long) data.getAs("id"), (Long) data.getAs("id"))) .mapToPair(data -> Tuple2.apply((Long) data.getAs("id"), (Long) data.getAs("id")))
.reduceByKey((x, y) -> x); .reduceByKey((x, y) -> x);
...@@ -255,6 +255,7 @@ public class TagProcessManager { ...@@ -255,6 +255,7 @@ public class TagProcessManager {
//process mixed tags //process mixed tags
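// NOTE: the mixed-tag handling below is disabled in this change by wrapping it in a block comment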
/**
JavaPairRDD<Long, String> searchRDD = null; JavaPairRDD<Long, String> searchRDD = null;
for (TagProcessEntity mixEntity : enterpriseTagEntry.getValue()) { for (TagProcessEntity mixEntity : enterpriseTagEntry.getValue()) {
...@@ -309,6 +310,7 @@ public class TagProcessManager { ...@@ -309,6 +310,7 @@ public class TagProcessManager {
updateIndex(groupRDD, indexName); updateIndex(groupRDD, indexName);
} }
*/
} }
} }
......
package com.gic.spark.util; package com.gic.spark.util;
import com.gic.spark.entity.table.TabDataActuallyPaidConfig; import com.gic.spark.entity.table.TabDataActuallyPaidConfig;
import scala.Tuple2;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
...@@ -13,46 +12,54 @@ import java.util.Map; ...@@ -13,46 +12,54 @@ import java.util.Map;
*/ */
public class CommonUtil { public class CommonUtil {
public static Map<Integer,TabDataActuallyPaidConfig> dataActuallyPaidConfigMap=new HashMap(); public static Map<Integer, TabDataActuallyPaidConfig> dataActuallyPaidConfigMap = new HashMap();
/** /**
* 1: actual paid * 1: actual paid
* 0: disabled (payable) * 0: disabled (payable)
*
* @param enterprise_Id * @param enterprise_Id
* @return * @return
*/ */
public static Integer getConfigStatus(Integer enterprise_Id){ public static Integer getConfigStatus(Integer enterprise_Id) {
TabDataActuallyPaidConfig dataActuallyPaidConfig=dataActuallyPaidConfigMap.get(enterprise_Id); TabDataActuallyPaidConfig dataActuallyPaidConfig = dataActuallyPaidConfigMap.get(enterprise_Id);
if(null==dataActuallyPaidConfig||null==dataActuallyPaidConfig.getConfig_Status()){ if (null == dataActuallyPaidConfig || null == dataActuallyPaidConfig.getConfig_Status()) {
return 0; return 0;
}else{ } else {
return dataActuallyPaidConfig.getConfig_Status(); return dataActuallyPaidConfig.getConfig_Status();
} }
} }
public static int isEmptyInteger2int(Integer param,int defaultValue){ public static int isEmptyInteger2int(Integer param, int defaultValue) {
return null==param?defaultValue:param; return null == param ? defaultValue : param;
} }
public static int isEmptyInteger2int(Integer param){
return isEmptyInteger2int(param,0); public static int isEmptyInteger2int(Integer param) {
return isEmptyInteger2int(param, 0);
} }
public static long isEmptyLong2long(Long param,long defaultValue){
return null==param?defaultValue:param; public static long isEmptyLong2long(Long param, long defaultValue) {
return null == param ? defaultValue : param;
} }
public static long isEmptyLong2long(Long param){
return isEmptyLong2long(param,0l); public static long isEmptyLong2long(Long param) {
return isEmptyLong2long(param, 0L);
} }
public static float isEmptyFloat2float(Float param,float defaultValue){
return null==param?defaultValue:param; public static float isEmptyFloat2float(Float param, float defaultValue) {
return null == param ? defaultValue : param;
} }
public static float isEmptyFloat2float(Float param){
return isEmptyFloat2float(param,0f); public static float isEmptyFloat2float(Float param) {
return isEmptyFloat2float(param, 0f);
} }
public static double isEmptyDouble2double(Double param,double defaultValue){
return null==param?defaultValue:param; public static double isEmptyDouble2double(Double param, double defaultValue) {
return null == param ? defaultValue : param;
} }
public static double isEmptyDouble2double(Double param){
return isEmptyDouble2double(param,0d); public static double isEmptyDouble2double(Double param) {
return isEmptyDouble2double(param, 0d);
} }
} }
...@@ -8,6 +8,7 @@ package com.gic.spark.util; ...@@ -8,6 +8,7 @@ package com.gic.spark.util;
public class ConstantUtil { public class ConstantUtil {
public static final String TAB_ENTERPRISE_USER="tab_enterprise_user"; public static final String TAB_ENTERPRISE_USER="tab_enterprise_user";
public static final String TAB_ENTERPRISE_USER_RELATION="tab_enterprise_user_relation";
public static final String TAB_COUPON_LOG="tab_coupon_log"; public static final String TAB_COUPON_LOG="tab_coupon_log";
public static final String TAB_INTEGRAL_CU_CHANGE_LOG="tab_integral_cu_change_log"; public static final String TAB_INTEGRAL_CU_CHANGE_LOG="tab_integral_cu_change_log";
public static final String ADS_GIC_TRD_ECU_SALES_LABEL_D="demoads.ads_gic_trd_ecu_sales_label_d"; public static final String ADS_GIC_TRD_ECU_SALES_LABEL_D="demoads.ads_gic_trd_ecu_sales_label_d";
......