Commit b381a3a9 by zhangyannao

代码提交

parent 742d13ef
package com.gic.spark.entity;
import com.gic.spark.entity.request.AbstractFilterRequest;
/**
* Created by paste on 2021-03-11 13:51
*
* @author paste
*/
/**
 * Pairs a tag group id with the filter request that should be evaluated for it.
 * Simple value holder passed to the batch {@code filterValidMember} overloads.
 */
public class FilterProcessEntity {

    /** Identifier of the tag group this request belongs to. */
    String tagGroupId;

    /** The filter request to evaluate for the tag group. */
    AbstractFilterRequest request;

    /**
     * @param tagGroupId tag group identifier
     * @param request    filter request evaluated for that group
     */
    public FilterProcessEntity(String tagGroupId, AbstractFilterRequest request) {
        this.tagGroupId = tagGroupId;
        this.request = request;
    }

    public String getTagGroupId() {
        return tagGroupId;
    }

    public void setTagGroupId(String tagGroupId) {
        this.tagGroupId = tagGroupId;
    }

    public AbstractFilterRequest getRequest() {
        return request;
    }

    public void setRequest(AbstractFilterRequest request) {
        this.request = request;
    }
}
......@@ -3,14 +3,13 @@ package com.gic.spark.entity;
import com.gic.spark.entity.table.TabSceneCrowd;
import java.util.LinkedList;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2020/4/16
*/
public class SceneCrowdDTO extends TabSceneCrowd{
public class SceneCrowdDTO extends TabSceneCrowd {
private LinkedList<TagConditionGroupDTO> conditionGroupDTOList;
......@@ -18,7 +17,7 @@ public class SceneCrowdDTO extends TabSceneCrowd{
public SceneCrowdDTO() {
}
public SceneCrowdDTO(TabSceneCrowd sceneCrowd,LinkedList<TagConditionGroupDTO> conditionGroupDTOList){
public SceneCrowdDTO(TabSceneCrowd sceneCrowd, LinkedList<TagConditionGroupDTO> conditionGroupDTOList) {
this.setId(sceneCrowd.getId());
this.setScene_Crowd_Name(sceneCrowd.getScene_Crowd_Name());
this.setReal_Time(sceneCrowd.getReal_Time());
......
......@@ -8,10 +8,9 @@ import java.sql.Timestamp;
* @author: wangxk
* @date: 2020/4/28
*/
public class TagCouponBean implements Serializable{
public class TagCouponBean implements Serializable {
private Long coupon_id;
private Long ecu_Id;
private Long scu_Id;
private Long acu_Id;
private Long mcu_Id;
......@@ -26,14 +25,6 @@ public class TagCouponBean implements Serializable{
this.coupon_id = coupon_id;
}
public Long getEcu_Id() {
return ecu_Id;
}
public void setEcu_Id(Long ecu_Id) {
this.ecu_Id = ecu_Id;
}
public Long getScu_Id() {
return scu_Id;
}
......
......@@ -13,7 +13,6 @@ public class TagIntegralBean implements Serializable {
private Integer enterprise_Id;
private Integer cu_Type;
private Long cu_Id;
private Long file_ecu_id;
private Integer integral_Value;
private java.sql.Timestamp create_Time;
private java.sql.Timestamp limit_time;
......@@ -42,14 +41,6 @@ public class TagIntegralBean implements Serializable {
this.cu_Id = cu_Id;
}
public Long getFile_ecu_id() {
return file_ecu_id;
}
public void setFile_ecu_id(Long file_ecu_id) {
this.file_ecu_id = file_ecu_id;
}
public Integer getIntegral_Value() {
return integral_Value;
}
......
package com.gic.spark.entity.bean;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2020/5/7
*/
/**
 * Per-brand sales statistics row: extends the common sales base bean with the
 * product brand id.
 */
public class TrdEcuBrandBean extends TrdEcuSalesBeanBase implements Serializable {

    /** Product brand id. */
    private Long ent_brand_id;

    public Long getEnt_brand_id() {
        return ent_brand_id;
    }

    public void setEnt_brand_id(Long ent_brand_id) {
        this.ent_brand_id = ent_brand_id;
    }
}
package com.gic.spark.entity.bean;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2020/5/7
*/
/**
 * Per-member, per-brand sales label row. Plain serializable JavaBean:
 * one getter/setter pair per column, declared in column order.
 */
public class TrdEcuBrandLabelBean implements Serializable {

    private Integer ent_id;         // enterprise id
    private Long mbr_area_id;       // card area id
    private Long ecu_id;            // member (ecu) id
    private Integer store_info_id;  // store id
    private Integer order_times;    // consumption count (per configuration)
    private Integer seff_order_cnt; // configured order count
    private Integer seff_goods_num; // configured sold-item count
    private Double receive_amt;     // receivable amount
    private Double pay_amt;         // actually-paid amount
    private Double total_amt;       // total tag-price amount
    private Integer sleep_days;     // dormant days
    private Long ent_brand_id;      // product brand id

    public Integer getEnt_id() {
        return ent_id;
    }

    public void setEnt_id(Integer ent_id) {
        this.ent_id = ent_id;
    }

    public Long getMbr_area_id() {
        return mbr_area_id;
    }

    public void setMbr_area_id(Long mbr_area_id) {
        this.mbr_area_id = mbr_area_id;
    }

    public Long getEcu_id() {
        return ecu_id;
    }

    public void setEcu_id(Long ecu_id) {
        this.ecu_id = ecu_id;
    }

    public Integer getStore_info_id() {
        return store_info_id;
    }

    public void setStore_info_id(Integer store_info_id) {
        this.store_info_id = store_info_id;
    }

    public Integer getOrder_times() {
        return order_times;
    }

    public void setOrder_times(Integer order_times) {
        this.order_times = order_times;
    }

    public Integer getSeff_order_cnt() {
        return seff_order_cnt;
    }

    public void setSeff_order_cnt(Integer seff_order_cnt) {
        this.seff_order_cnt = seff_order_cnt;
    }

    public Integer getSeff_goods_num() {
        return seff_goods_num;
    }

    public void setSeff_goods_num(Integer seff_goods_num) {
        this.seff_goods_num = seff_goods_num;
    }

    public Double getReceive_amt() {
        return receive_amt;
    }

    public void setReceive_amt(Double receive_amt) {
        this.receive_amt = receive_amt;
    }

    public Double getPay_amt() {
        return pay_amt;
    }

    public void setPay_amt(Double pay_amt) {
        this.pay_amt = pay_amt;
    }

    public Double getTotal_amt() {
        return total_amt;
    }

    public void setTotal_amt(Double total_amt) {
        this.total_amt = total_amt;
    }

    public Integer getSleep_days() {
        return sleep_days;
    }

    public void setSleep_days(Integer sleep_days) {
        this.sleep_days = sleep_days;
    }

    public Long getEnt_brand_id() {
        return ent_brand_id;
    }

    public void setEnt_brand_id(Long ent_brand_id) {
        this.ent_brand_id = ent_brand_id;
    }
}
package com.gic.spark.entity.bean;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2020/5/7
*/
/**
 * Per-channel sales statistics row: extends the common sales base bean with
 * the channel code.
 */
public class TrdEcuChannelBean extends TrdEcuSalesBeanBase implements Serializable {

    /** Sales channel code. */
    private Long channel_code;

    public Long getChannel_code() {
        return channel_code;
    }

    public void setChannel_code(Long channel_code) {
        this.channel_code = channel_code;
    }
}
......@@ -7,7 +7,7 @@ import java.io.Serializable;
* @author: wangxk
* @date: 2020/5/7
*/
public class TrdEcuSalesLabelBean implements Serializable {
public class TrdEcuSalesBeanBase implements Serializable {
private Integer ent_id; //企业id
private Long mbr_area_id;//卡域id
......
......@@ -3,77 +3,77 @@ package com.gic.spark.entity.table;
import java.io.Serializable;
/**
 * Entity bean for the actually-paid configuration table (one row per config id).
 * Plain serializable JavaBean with one getter/setter pair per column.
 *
 * NOTE(review): the previous text contained every field and accessor twice
 * (merge/diff residue, which does not compile); this is the deduplicated class.
 */
public class TabDataActuallyPaidConfig implements Serializable {

    private Integer actually_Paid_Config_Id;
    private Integer enterprise_Id;
    private Integer classify;
    private Integer config_Status;
    private Integer status;
    private java.sql.Timestamp create_Time;
    private java.sql.Timestamp update_Time;

    public Integer getActually_Paid_Config_Id() {
        return actually_Paid_Config_Id;
    }

    public void setActually_Paid_Config_Id(Integer actually_Paid_Config_Id) {
        this.actually_Paid_Config_Id = actually_Paid_Config_Id;
    }

    public Integer getEnterprise_Id() {
        return enterprise_Id;
    }

    public void setEnterprise_Id(Integer enterprise_Id) {
        this.enterprise_Id = enterprise_Id;
    }

    public Integer getClassify() {
        return classify;
    }

    public void setClassify(Integer classify) {
        this.classify = classify;
    }

    public Integer getConfig_Status() {
        return config_Status;
    }

    public void setConfig_Status(Integer config_Status) {
        this.config_Status = config_Status;
    }

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    public java.sql.Timestamp getCreate_Time() {
        return create_Time;
    }

    public void setCreate_Time(java.sql.Timestamp create_Time) {
        this.create_Time = create_Time;
    }

    public java.sql.Timestamp getUpdate_Time() {
        return update_Time;
    }

    public void setUpdate_Time(java.sql.Timestamp update_Time) {
        this.update_Time = update_Time;
    }
}
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.AbstractFilterRequestTime;
......@@ -8,6 +9,7 @@ import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import org.apache.spark.sql.Row;
import scala.Tuple2;
import java.util.ArrayList;
......@@ -25,6 +27,13 @@ public abstract class AbstractTagConsumRecordFilter implements BaseTagFilter {
DataSourceHive dataSourceHiveOrder = new DataSourceHive(ConstantUtil.DWD_GIC_TRD_VIRTUAL_WDORDER_D);
DataSourceHive dataSourceHiveOrderItem = new DataSourceHive(ConstantUtil.DWD_GIC_TRD_VIRTUAL_ORDER_ITEM_D);
/**
 * Loads the order and order-item datasets of the given enterprise as RDDs.
 *
 * NOTE(review): both RDDs are assigned to locals and then discarded, so this
 * method currently has no observable effect. Presumably it was meant to
 * warm/cache the Hive sources or to be completed later — confirm the intent
 * before calling it.
 */
protected void getOrderRdds(int enterpriseId) {
JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
JavaRDD<Row> virtualOrderItemRdd = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId).select("virtual_order_id", "ent_brand_id").javaRDD();
}
protected static JavaRDD<TrdVirtualOrderBean> statisticsTypeHandle(JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderRdd, AbstractFilterRequest request) {
JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = orderRdd.mapPartitions(data -> {
List<TrdVirtualOrderBean> result = new ArrayList();
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.request.AbstractFilterRequest;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import java.util.List;
......@@ -16,4 +18,6 @@ public interface BaseTagFilter {
List<DataSourceEntity> necessarySourceList();
JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request);
JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList);
}
......@@ -2,17 +2,22 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdEcuBrandLabelBean;
import com.gic.spark.entity.bean.TrdEcuSalesLabelBean;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.bean.TrdEcuBrandBean;
import com.gic.spark.entity.bean.TrdEcuSalesBeanBase;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeDoubleRequest;
import com.gic.spark.util.CommonUtil;
import com.google.common.base.Joiner;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @description: 平均折扣率
......@@ -45,14 +50,14 @@ public class TagAverageDiscountFactorFilter extends AbstractTagConsumFilter {
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) request;
JavaRDD<TrdEcuSalesLabelBean> salesLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesLabelBean.class).javaRDD();
JavaRDD<TrdEcuBrandLabelBean> brandLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandLabelBean.class).javaRDD();
JavaRDD<TrdEcuSalesBeanBase> salesLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesBeanBase.class).javaRDD();
JavaRDD<TrdEcuBrandBean> brandLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandBean.class).javaRDD();
JavaRDD<Tuple2<TrdEcuSalesLabelBean, Optional<Iterable<TrdEcuBrandLabelBean>>>> labelRDD = salesLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
JavaRDD<Tuple2<TrdEcuSalesBeanBase, Optional<Iterable<TrdEcuBrandBean>>>> labelRDD = salesLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.leftOuterJoin(brandLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data)).groupByKey())
.map(data -> data._2());
JavaRDD<TrdEcuSalesLabelBean> consumeRDD = statisticsTypeHandle(labelRDD, consumeRequest);
JavaRDD<TrdEcuSalesBeanBase> consumeRDD = statisticsTypeHandle(labelRDD, consumeRequest);
int configStatus = CommonUtil.getConfigStatus(enterpriseId);
JavaRDD<Long> ecuRdd = consumeRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
......@@ -65,7 +70,7 @@ public class TagAverageDiscountFactorFilter extends AbstractTagConsumFilter {
.mapPartitions(data -> {
List<Long> result = new ArrayList();
while (data.hasNext()) {
Tuple2<Long, TrdEcuSalesLabelBean> tp2 = data.next();
Tuple2<Long, TrdEcuSalesBeanBase> tp2 = data.next();
double avgDiscountRate = 1 == configStatus ? CommonUtil.isEmptyDouble2double(tp2._2().getPay_amt()) / CommonUtil.isEmptyDouble2double(tp2._2().getTotal_amt())
: CommonUtil.isEmptyDouble2double(tp2._2().getReceive_amt()) / CommonUtil.isEmptyDouble2double(tp2._2().getTotal_amt());
switch (consumeRequest.getNumberType()) {
......@@ -107,4 +112,39 @@ public class TagAverageDiscountFactorFilter extends AbstractTagConsumFilter {
});
return ecuRdd;
}
/**
 * Batch variant: evaluates every tag-group request against each member's
 * consumption statistics and emits, per member, the space-joined ids of the
 * tag groups whose average-discount-rate condition matched.
 *
 * @param enterpriseId      enterprise whose consumption data is loaded
 * @param processEntityList tag-group/request pairs to evaluate
 * @return pair RDD of (ecu id, space-joined matching tag group ids); members
 *         with no match are filtered out
 */
@Override
public JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList) {
    JavaRDD<ConsumeStatisticEntity> entityJavaRDD = getConsumeEntity(enterpriseId);
    // configStatus == 1 -> use actually-paid amount, otherwise receivable amount.
    int configStatus = CommonUtil.getConfigStatus(enterpriseId);
    JavaPairRDD<Long, String> result = entityJavaRDD.mapToPair(data -> {
        Set<String> groupIds = new HashSet<>();
        for (FilterProcessEntity entity : processEntityList) {
            TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) entity.getRequest();
            List<TrdEcuSalesBeanBase> salesBeanList = filterSalesBean(data, entity.getRequest());
            if (salesBeanList.isEmpty()) {
                continue;
            }
            double receiveAmount = 0;
            double payAmount = 0;
            double totalAmount = 0;
            for (TrdEcuSalesBeanBase beanBase : salesBeanList) {
                // Null-safe unboxing: the amount columns are nullable Doubles and a
                // direct "+=" would NPE; the single-request overload uses the same
                // CommonUtil guard for its division operands.
                receiveAmount += CommonUtil.isEmptyDouble2double(beanBase.getReceive_amt());
                payAmount += CommonUtil.isEmptyDouble2double(beanBase.getPay_amt());
                totalAmount += CommonUtil.isEmptyDouble2double(beanBase.getTotal_amt());
            }
            double avgDiscountRate = 1 == configStatus ? payAmount / totalAmount
                    : receiveAmount / totalAmount;
            String tagGroupId = entity.getTagGroupId();
            handleValueCompare(groupIds, consumeRequest, tagGroupId, avgDiscountRate);
        }
        // null marks "no group matched" so the trailing filter can drop the member.
        return Tuple2.apply(data.ecuId, groupIds.isEmpty() ? null : Joiner.on(" ").join(groupIds));
    }).filter(data -> data._2() != null);
    return result;
}
}
......@@ -2,6 +2,7 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
......@@ -48,10 +49,10 @@ public class TagConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
Dataset<Row> OrderItemDS = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId);
Dataset<Row> orderItemDS = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId);
JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderAndItemRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
.leftOuterJoin(OrderItemDS.select("virtual_order_id", "ent_brand_id").javaRDD()
.leftOuterJoin(orderItemDS.select("virtual_order_id", "ent_brand_id").javaRDD()
.mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1)))
.groupByKey())
.map(data -> data._2());
......@@ -61,7 +62,7 @@ public class TagConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
.filter(data -> checkTime(commodityRequest, DateUtil.strToDate(data.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()))
.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data.getEcu_id()));
JavaPairRDD<Long, Long> orderItemRDD = MysqlRddManager.getPojoFromDataset(OrderItemDS, TrdVirtualOrderItemBean.class).javaRDD()
JavaPairRDD<Long, Long> orderItemRDD = MysqlRddManager.getPojoFromDataset(orderItemDS, TrdVirtualOrderItemBean.class).javaRDD()
.filter(data -> {
if (StringUtils.isNotEmpty(data.getSku_code())
&& commodityRequest.getSkuCodeList().contains(data.getSku_code())) {
......@@ -78,4 +79,10 @@ public class TagConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
return ecuRdd;
}
/**
 * Batch variant of the filter.
 *
 * NOTE(review): not implemented for this filter — always returns null.
 * Callers of the batch API must handle this; TODO implement or document why
 * commodity filtering is excluded from batch processing.
 */
@Override
public JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList) {
return null;
}
}
......@@ -7,7 +7,6 @@ import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeTimeRequest;
import com.gic.spark.util.DateUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import org.apache.spark.sql.Column;
......@@ -22,18 +21,19 @@ import java.util.List;
* @author: wangxk
* @date: 2020/8/10
*/
public class TagConsumeTimeFilter extends AbstractTagConsumRecordFilter{
public class TagConsumeTimeFilter extends AbstractTagConsumRecordFilter {
private static TagConsumeTimeFilter instance;
public static TagConsumeTimeFilter getInstance() {
if(null==instance){
instance=new TagConsumeTimeFilter();
if (null == instance) {
instance = new TagConsumeTimeFilter();
}
return instance;
}
private TagConsumeTimeFilter(){}
private TagConsumeTimeFilter() {
}
@Override
public List<DataSourceEntity> necessarySourceList() {
......@@ -45,22 +45,22 @@ public class TagConsumeTimeFilter extends AbstractTagConsumRecordFilter{
/**
 * Finds members who have at least one effective order whose receipt time
 * satisfies the request's time condition.
 *
 * (Previous text contained every statement twice — old and reformatted diff
 * lines — which does not compile; this keeps the formatted version once.)
 *
 * @param enterpriseId enterprise to load order data for
 * @param request      a TagConsumeTimeRequest carrying the time condition
 * @return RDD of distinct ecu ids with a qualifying order
 */
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
    TagConsumeTimeRequest consumeTimeRequest = (TagConsumeTimeRequest) request;
    // Only effective orders (is_eff_order == 1) participate.
    JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId)
            .filter(new Column("is_eff_order").equalTo(1)), TrdVirtualOrderBean.class).javaRDD();

    JavaRDD<Row> virtualOrderItemRdd = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId).select("virtual_order_id", "ent_brand_id").javaRDD();

    // Attach each order's brand ids so statisticsTypeHandle can filter by brand.
    JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
            .leftOuterJoin(virtualOrderItemRdd.mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1))).groupByKey())
            .map(data -> data._2());

    consumeRecordRDD = statisticsTypeHandle(orderRdd, consumeTimeRequest);

    // Keep members with at least one receipt time inside the requested window.
    JavaRDD<Long> ecuRdd = consumeRecordRDD.filter(data -> StringUtils.isNotEmpty(data.getReceipts_time()))
            .mapToPair(data -> Tuple2.apply(data.getEcu_id(), DateUtil.strToDate(data.getReceipts_time(), DateUtil.FORMAT_DATETIME_19)))
            .filter(data -> checkTime(consumeTimeRequest, data._2().getTime()))
            .reduceByKey((x, y) -> x)
            .map(data -> data._1());
    return ecuRdd;
}
}
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeCommodityRequest;
import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
......@@ -25,14 +23,14 @@ import java.util.List;
* @author: wangxk
* @date: 2020/8/12
*/
public class TagFirstConsumeCommodityFilter extends AbstractTagConsumRecordFilter{
public class TagFirstConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
private static TagFirstConsumeCommodityFilter instance;
public static TagFirstConsumeCommodityFilter getInstance() {
if(null==instance){
instance=new TagFirstConsumeCommodityFilter();
if (null == instance) {
instance = new TagFirstConsumeCommodityFilter();
}
return instance;
}
......@@ -47,45 +45,47 @@ public class TagFirstConsumeCommodityFilter extends AbstractTagConsumRecordFilte
/**
 * Finds members whose FIRST order (earliest receipts_time) contains at least
 * one of the requested SKU codes.
 *
 * (Previous text contained the whole method body twice — old and reformatted
 * diff lines — which does not compile; this keeps the formatted version once.)
 *
 * @param enterpriseId enterprise to load order data for
 * @param request      a TagConsumeCommodityRequest carrying the SKU code list
 * @return RDD of distinct ecu ids whose first order matches a requested SKU
 */
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
    TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
    JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
    Dataset<Row> orderItemDS = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId);

    // Attach each order's brand ids so statisticsTypeHandle can filter by brand.
    JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderAndItemRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
            .leftOuterJoin(orderItemDS.select("virtual_order_id", "ent_brand_id").javaRDD()
                    .mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1)))
                    .groupByKey())
            .map(data -> data._2());

    consumeRecordRDD = statisticsTypeHandle(orderAndItemRdd, commodityRequest);

    // Reduce to each member's earliest order, keyed as (virtual_id -> ecu_id).
    JavaPairRDD<Long, Long> orderRdd = consumeRecordRDD.filter(data -> StringUtils.isNotEmpty(data.getReceipts_time()))
            .mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
            .reduceByKey((x, y) -> {
                if (DateUtil.strToDate(x.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()
                        < DateUtil.strToDate(y.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()) {
                    return x;
                } else {
                    return y;
                }
            })
            .mapToPair(data -> Tuple2.apply(data._2().getVirtual_id(), data._1()));

    // Order ids having at least one item whose SKU is in the requested list.
    JavaPairRDD<Long, Long> orderItemRDD = MysqlRddManager.getPojoFromDataset(orderItemDS, TrdVirtualOrderItemBean.class).javaRDD()
            .filter(data -> {
                if (StringUtils.isNotEmpty(data.getSku_code())
                        && commodityRequest.getSkuCodeList().contains(data.getSku_code())) {
                    return true;
                }
                return false;
            }).mapToPair(data -> Tuple2.apply(data.getVirtual_order_id(), data.getVirtual_order_id()))
            .reduceByKey((x, y) -> x);

    // Keep members whose first order joined against a matching SKU order id.
    JavaRDD<Long> ecuRdd = orderRdd.leftOuterJoin(orderItemRDD)
            .filter(data -> data._2()._2().isPresent())
            .map(data -> data._2()._1()).distinct();
    return ecuRdd;
}
}
......@@ -5,7 +5,6 @@ import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeStoreRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import org.apache.spark.sql.Row;
......@@ -19,18 +18,19 @@ import java.util.List;
* @author: wangxk
* @date: 2020/8/11
*/
public class TagHistoryOfflineConsumptionStoreFilter extends AbstractTagConsumRecordFilter{
public class TagHistoryOfflineConsumptionStoreFilter extends AbstractTagConsumRecordFilter {
private static TagHistoryOfflineConsumptionStoreFilter instance;
public static TagHistoryOfflineConsumptionStoreFilter getInstance() {
if(null==instance){
instance=new TagHistoryOfflineConsumptionStoreFilter();
if (null == instance) {
instance = new TagHistoryOfflineConsumptionStoreFilter();
}
return instance;
}
private TagHistoryOfflineConsumptionStoreFilter(){}
private TagHistoryOfflineConsumptionStoreFilter() {
}
@Override
public List<DataSourceEntity> necessarySourceList() {
......@@ -42,22 +42,22 @@ public class TagHistoryOfflineConsumptionStoreFilter extends AbstractTagConsumRe
/**
 * Finds members who ever consumed offline (order_channel_code == 1) in one of
 * the stores listed by the request.
 *
 * (Previous text contained every statement twice — old and reformatted diff
 * lines — which does not compile; this keeps the formatted version once.)
 *
 * @param enterpriseId enterprise to load order data for
 * @param request      a TagConsumeStoreRequest carrying the store id list
 * @return RDD of distinct ecu ids with a qualifying offline order
 */
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
    TagConsumeStoreRequest storeRequest = (TagConsumeStoreRequest) request;
    JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();

    JavaRDD<Row> virtualOrderItemRdd = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId).select("virtual_order_id", "ent_brand_id").toJavaRDD();

    // Attach each order's brand ids so statisticsTypeHandle can filter by brand.
    JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderRdd = consumeRecordRDD.mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
            .leftOuterJoin(virtualOrderItemRdd.mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1))).groupByKey())
            .map(data -> data._2());

    consumeRecordRDD = statisticsTypeHandle(orderRdd, storeRequest);

    // Offline channel only, with a store id contained in the request's list.
    JavaRDD<Long> ecuRdd = consumeRecordRDD.filter(data -> data.getOrder_channel_code() == 1
            && null != data.getStore_info_id())
            .mapToPair(data -> Tuple2.apply(data.getEcu_id(), data.getStore_info_id()))
            .filter(data -> storeRequest.getStoreList().contains(String.valueOf(data._2())))
            .reduceByKey((x, y) -> x)
            .map(data -> data._1());
    return ecuRdd;
}
......
package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceHive;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdVirtualOrderBean;
import com.gic.spark.entity.bean.TrdVirtualOrderItemBean;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeCommodityRequest;
import com.gic.spark.util.ConstantUtil;
import com.gic.spark.util.DateUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
......@@ -25,14 +23,14 @@ import java.util.List;
* @author: wangxk
* @date: 2020/8/12
*/
public class TagLatelyConsumeCommodityFilter extends AbstractTagConsumRecordFilter{
public class TagLatelyConsumeCommodityFilter extends AbstractTagConsumRecordFilter {
private static TagLatelyConsumeCommodityFilter instance;
public static TagLatelyConsumeCommodityFilter getInstance() {
if(null==instance){
instance=new TagLatelyConsumeCommodityFilter();
if (null == instance) {
instance = new TagLatelyConsumeCommodityFilter();
}
return instance;
}
......@@ -47,45 +45,45 @@ public class TagLatelyConsumeCommodityFilter extends AbstractTagConsumRecordFilt
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
    // Returns the member (ecu) ids whose MOST RECENT consumption order contains
    // at least one of the SKU codes requested by the tag condition.
    TagConsumeCommodityRequest commodityRequest = (TagConsumeCommodityRequest) request;
    JavaRDD<TrdVirtualOrderBean> consumeRecordRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveOrder.getDatasetByEntId(enterpriseId), TrdVirtualOrderBean.class).javaRDD();
    Dataset<Row> orderItemDS = dataSourceHiveOrderItem.getDatasetByEntId(enterpriseId);

    // Attach each order's item brand ids so statisticsTypeHandle can narrow the
    // orders to the request's statistics scope.
    JavaRDD<Tuple2<TrdVirtualOrderBean, Optional<Iterable<String>>>> orderAndItemRdd = consumeRecordRDD
            .mapToPair(data -> Tuple2.apply(data.getVirtual_id(), data))
            .leftOuterJoin(orderItemDS.select("virtual_order_id", "ent_brand_id").javaRDD()
                    .mapToPair(row -> Tuple2.apply(row.getLong(0), row.getString(1)))
                    .groupByKey())
            .map(data -> data._2());
    consumeRecordRDD = statisticsTypeHandle(orderAndItemRdd, commodityRequest);

    // Keep only the latest order per member (max receipts_time), then re-key by
    // order id: (virtual_id -> ecu_id).
    JavaPairRDD<Long, Long> orderRdd = consumeRecordRDD.filter(data -> StringUtils.isNotEmpty(data.getReceipts_time()))
            .mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
            .reduceByKey((x, y) -> {
                if (DateUtil.strToDate(x.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()
                        > DateUtil.strToDate(y.getReceipts_time(), DateUtil.FORMAT_DATETIME_19).getTime()) {
                    return x;
                } else {
                    return y;
                }
            })
            .mapToPair(data -> Tuple2.apply(data._2().getVirtual_id(), data._1()));

    // Distinct ids of orders containing at least one requested SKU.
    JavaPairRDD<Long, Long> orderItemRDD = MysqlRddManager.getPojoFromDataset(orderItemDS, TrdVirtualOrderItemBean.class).javaRDD()
            .filter(data -> StringUtils.isNotEmpty(data.getSku_code())
                    && commodityRequest.getSkuCodeList().contains(data.getSku_code()))
            .mapToPair(data -> Tuple2.apply(data.getVirtual_order_id(), data.getVirtual_order_id()))
            .reduceByKey((x, y) -> x);

    // Left-join + isPresent() acts as an inner join: members whose latest order
    // matched one of the SKUs.
    JavaRDD<Long> ecuRdd = orderRdd.leftOuterJoin(orderItemRDD)
            .filter(data -> data._2()._2().isPresent())
            .map(data -> data._2()._1())
            .distinct();
    return ecuRdd;
}
}
......@@ -2,35 +2,43 @@ package com.gic.spark.filter;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.mysql.MysqlRddManager;
import com.gic.spark.entity.bean.TrdEcuBrandLabelBean;
import com.gic.spark.entity.bean.TrdEcuSalesLabelBean;
import com.gic.spark.entity.FilterProcessEntity;
import com.gic.spark.entity.bean.TrdEcuBrandBean;
import com.gic.spark.entity.bean.TrdEcuSalesBeanBase;
import com.gic.spark.entity.request.AbstractFilterRequest;
import com.gic.spark.entity.request.TagConsumeDoubleRequest;
import com.gic.spark.entity.request.TagConsumeRequest;
import com.gic.spark.util.CommonUtil;
import com.google.common.base.Joiner;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @description:
* @author: wangxk
* @date: 2020/8/26
*/
public class TagUnitPriceFilter extends AbstractTagConsumFilter{
public class TagUnitPriceFilter extends AbstractTagConsumFilter {
private static TagUnitPriceFilter instance;
public static TagUnitPriceFilter getInstance() {
if(null==instance){
instance=new TagUnitPriceFilter();
if (null == instance) {
instance = new TagUnitPriceFilter();
}
return instance;
}
// Private: instances are obtained only through getInstance().
private TagUnitPriceFilter() {
}
@Override
public List<DataSourceEntity> necessarySourceList() {
List<DataSourceEntity> result = new ArrayList();
......@@ -41,67 +49,106 @@ public class TagUnitPriceFilter extends AbstractTagConsumFilter{
@Override
public JavaRDD<Long> filterValidMember(Integer enterpriseId, AbstractFilterRequest request) {
TagConsumeRequest consumeRequest=(TagConsumeRequest)request;
JavaRDD<TrdEcuSalesLabelBean> salesLabelRDD=MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesLabelBean.class).javaRDD();
JavaRDD<TrdEcuBrandLabelBean> brandLabelRDD=MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandLabelBean.class).javaRDD();
TagConsumeRequest consumeRequest = (TagConsumeRequest) request;
JavaRDD<TrdEcuSalesBeanBase> salesLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveSalesLabel.getDatasetByEntId(enterpriseId), TrdEcuSalesBeanBase.class).javaRDD();
JavaRDD<TrdEcuBrandBean> brandLabelRDD = MysqlRddManager.getPojoFromDataset(dataSourceHiveBrandLabel.getDatasetByEntId(enterpriseId), TrdEcuBrandBean.class).javaRDD();
JavaRDD<Tuple2<TrdEcuSalesLabelBean,Optional<Iterable<TrdEcuBrandLabelBean>>>>labelRDD=salesLabelRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data))
.leftOuterJoin(brandLabelRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data)).groupByKey())
.map(data->data._2());
JavaRDD<Tuple2<TrdEcuSalesBeanBase, Optional<Iterable<TrdEcuBrandBean>>>> labelRDD = salesLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.leftOuterJoin(brandLabelRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data)).groupByKey())
.map(data -> data._2());
JavaRDD<TrdEcuSalesLabelBean> consumeRDD=statisticsTypeHandle(labelRDD,consumeRequest);
JavaRDD<TrdEcuSalesBeanBase> consumeRDD = statisticsTypeHandle(labelRDD, consumeRequest);
int configStatus= CommonUtil.getConfigStatus(enterpriseId);
JavaRDD<Long>ecuRdd=consumeRDD.mapToPair(data-> Tuple2.apply(data.getEcu_id(),data))
.reduceByKey((x,y)->{
x.setReceive_amt(x.getReceive_amt()+y.getReceive_amt());
x.setPay_amt(x.getPay_amt()+y.getPay_amt());
x.setOrder_times(x.getOrder_times()+y.getOrder_times());
int configStatus = CommonUtil.getConfigStatus(enterpriseId);
JavaRDD<Long> ecuRdd = consumeRDD.mapToPair(data -> Tuple2.apply(data.getEcu_id(), data))
.reduceByKey((x, y) -> {
x.setReceive_amt(x.getReceive_amt() + y.getReceive_amt());
x.setPay_amt(x.getPay_amt() + y.getPay_amt());
x.setOrder_times(x.getOrder_times() + y.getOrder_times());
return x;
})
.mapPartitions(data->{
List<Long> result=new ArrayList();
while (data.hasNext()){
Tuple2<Long,TrdEcuSalesLabelBean> tp2=data.next();
double CusSinglePiece=1==configStatus?CommonUtil.isEmptyDouble2double(tp2._2().getPay_amt())/CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num())
:CommonUtil.isEmptyDouble2double(tp2._2().getReceive_amt())/CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num());
switch (consumeRequest.getNumberType()){
.mapPartitions(data -> {
List<Long> result = new ArrayList();
while (data.hasNext()) {
Tuple2<Long, TrdEcuSalesBeanBase> tp2 = data.next();
double CusSinglePiece = 1 == configStatus ? CommonUtil.isEmptyDouble2double(tp2._2().getPay_amt()) / CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num())
: CommonUtil.isEmptyDouble2double(tp2._2().getReceive_amt()) / CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num());
switch (consumeRequest.getNumberType()) {
case gt:
if(CusSinglePiece>consumeRequest.getBeginNum()){
if (CusSinglePiece > consumeRequest.getBeginNum()) {
result.add(tp2._1());
}
break;
case gte:
if(CusSinglePiece>=consumeRequest.getBeginNum()){
if (CusSinglePiece >= consumeRequest.getBeginNum()) {
result.add(tp2._1());
}
break;
case lt:
if(CusSinglePiece<consumeRequest.getEndNum()){
if (CusSinglePiece < consumeRequest.getEndNum()) {
result.add(tp2._1());
}
break;
case lte:
if(CusSinglePiece<=consumeRequest.getEndNum()){
if (CusSinglePiece <= consumeRequest.getEndNum()) {
result.add(tp2._1());
}
break;
case eq:
if(CusSinglePiece==consumeRequest.getEqualNum()){
if (CusSinglePiece == consumeRequest.getEqualNum()) {
result.add(tp2._1());
}
break;
case between:
if(CusSinglePiece>=consumeRequest.getBeginNum()
&&CusSinglePiece<=consumeRequest.getEndNum()){
if (CusSinglePiece >= consumeRequest.getBeginNum()
&& CusSinglePiece <= consumeRequest.getEndNum()) {
result.add(tp2._1());
}
default:break;
default:
break;
}
}
return result.iterator();
});
return ecuRdd;
}
@Override
public JavaPairRDD<Long, String> filterValidMember(Integer enterpriseId, List<FilterProcessEntity> processEntityList) {
    // Batch variant: evaluates every tag group against each member's aggregated
    // consumption and emits (ecuId, "tagGroupId tagGroupId ..."), dropping
    // members that match no group.
    JavaRDD<ConsumeStatisticEntity> entityJavaRDD = getConsumeEntity(enterpriseId);
    // configStatus == 1 -> use actually-paid amount, otherwise payable amount.
    int configStatus = CommonUtil.getConfigStatus(enterpriseId);
    JavaPairRDD<Long, String> rdd = entityJavaRDD.mapToPair(data -> {
        Set<String> groupIds = new HashSet<>();
        for (FilterProcessEntity entity : processEntityList) {
            TagConsumeDoubleRequest consumeRequest = (TagConsumeDoubleRequest) entity.getRequest();
            List<TrdEcuSalesBeanBase> salesBeanList = filterSalesBean(data, entity.getRequest());
            if (salesBeanList.isEmpty()) {
                continue;
            }
            double receiveAmount = 0;
            double payAmount = 0;
            // Effective goods count — the denominator of the per-piece price.
            int goodsNum = 0;
            for (TrdEcuSalesBeanBase beanBase : salesBeanList) {
                receiveAmount += beanBase.getReceive_amt();
                payAmount += beanBase.getPay_amt();
                goodsNum += beanBase.getSeff_goods_num();
            }
            // NOTE(review): goodsNum == 0 yields Infinity/NaN here, which simply
            // fails the comparison inside handleValueCompare — confirm intended.
            double cusSinglePiece = 1 == configStatus
                    ? payAmount / goodsNum
                    : receiveAmount / goodsNum;
            handleValueCompare(groupIds, consumeRequest, entity.getTagGroupId(), cusSinglePiece);
        }
        String joinedGroups = groupIds.isEmpty() ? null : Joiner.on(" ").join(groupIds);
        return Tuple2.apply(data.ecuId, joinedGroups);
    }).filter(data -> data._2() != null);
    return rdd;
}
}
......@@ -12,6 +12,9 @@ import java.util.List;
public class TagProcessEntity {
int enterpriseId;
long tagGroupId;
/**
* 1 实时 2 非实时
*/
Integer realTime;
int level;
List<TagConditionDTO> tagList;
......
......@@ -57,7 +57,8 @@ public class TagProcessManager {
private List<SceneCrowdDTO> sceneCrowdDTOList = new ArrayList();
private MysqlRddManager member4RddManager;
private MysqlRddManager enterprise4RddManager;
private DataSourceSharding memberSharding4Datasource;
private DataSourceSharding enterpriseUserDatasource;
private DataSourceSharding enterpriseUserRelationDatasource;
private MysqlDatasource member4Datasource = null;
private MysqlDatasource enterprise4Datasource = null;
private boolean isProduction;
......@@ -98,7 +99,8 @@ public class TagProcessManager {
}
member4RddManager = member4Datasource.buildRddManager();
enterprise4RddManager = enterprise4Datasource.buildRddManager();
memberSharding4Datasource = new DataSourceSharding(AppEnvUtil.MEMBER_SHARDING_4, ConstantUtil.TAB_ENTERPRISE_USER);
enterpriseUserDatasource = new DataSourceSharding(AppEnvUtil.MEMBER_SHARDING_4, ConstantUtil.TAB_ENTERPRISE_USER);
enterpriseUserRelationDatasource = new DataSourceSharding(AppEnvUtil.MEMBER_SHARDING_4, ConstantUtil.TAB_ENTERPRISE_USER_RELATION);
List<TabSceneCrowd> sceneCrowdList = member4RddManager.getPojo("tab_scene_crowd", TabSceneCrowd.class, null)
.filter(new Column("delete_flag").equalTo(0))
......@@ -175,15 +177,13 @@ public class TagProcessManager {
for (TagConditionDTO conditionDTO : processEntity.tagList) {
if (tagIdToFilterMap.containsKey(conditionDTO.getTagId())) {
for (DataSourceEntity sourceEntity : tagIdToFilterMap.get(conditionDTO.getTagId()).necessarySourceList()) {
// System.out.println("enterpriseId==>"+enterpriseTagEntry.getKey());
// System.out.println("SourceKey==>"+sourceEntity.getSourceKey());
// System.out.println("HiveTableName==>"+sourceEntity.getHiveTableName());
DataSourceManager.getInstance().addSourceEntity(sourceEntity, enterpriseTagEntry.getKey().intValue());
DataSourceManager.getInstance().addSourceEntity(sourceEntity, enterpriseTagEntry.getKey());
}
}
}
}
DataSourceManager.getInstance().addSourceEntity(memberSharding4Datasource, enterpriseTagEntry.getKey().intValue());
DataSourceManager.getInstance().addSourceEntity(enterpriseUserDatasource, enterpriseTagEntry.getKey());
DataSourceManager.getInstance().addSourceEntity(enterpriseUserRelationDatasource, enterpriseTagEntry.getKey());
}
if (extractData) {
......@@ -194,8 +194,8 @@ public class TagProcessManager {
//处理标签数据
JavaSparkContext jsc = SparkEnvManager.getInstance().getJsc();
List<Long> sceneCrowdIdList = new ArrayList();
for (Map.Entry<Integer, List<TagProcessEntity>> enterpriseTagEntry : tagGroupByEnterpriseMap.entrySet()) {
List<Long> sceneCrowdIdList = new ArrayList();
Integer enterpriseId = enterpriseTagEntry.getKey();
String indexName = EsRequestUtil.getESIindexName(enterpriseId, this.isProduction());
......@@ -210,7 +210,6 @@ public class TagProcessManager {
JavaPairRDD<Long, String> filterRdd = tagFilter.filterValidMember(enterpriseId, filterRequest).mapToPair(data -> Tuple2.apply(data, groupId));
System.out.println("filterRdd==>" + filterRdd.count());
if (null == memberGroupRdd) {
memberGroupRdd = filterRdd;
} else {
......@@ -222,7 +221,8 @@ public class TagProcessManager {
}
if (null != memberGroupRdd) {
JavaPairRDD<Long, Long> userRdd = memberSharding4Datasource.getDatasetByEnterpriseId(enterpriseId).select("id").javaRDD()
JavaPairRDD<Long, Long> userRdd = enterpriseUserDatasource.getDatasetByEnterpriseId(enterpriseId).select("id", "delete_flag").javaRDD()
.filter(data -> 0 == (Integer) data.getAs("delete_flag"))
.mapToPair(data -> Tuple2.apply((Long) data.getAs("id"), (Long) data.getAs("id")))
.reduceByKey((x, y) -> x);
......@@ -255,60 +255,62 @@ public class TagProcessManager {
//处理混合标签
JavaPairRDD<Long, String> searchRDD = null;
for (TagProcessEntity mixEntity : enterpriseTagEntry.getValue()) {
if (mixEntity.realTime == 3) {
Long tagGroupId = mixEntity.tagGroupId;
String query = EsRequestUtil.getIndexParam(enterpriseId, tagGroupId, this.isProduction);
if (StringUtils.isNotEmpty(query)) {
Map<String, String> conf = new HashMap();
conf.put("es.nodes", AppEnvUtil.ES_NODES);
conf.put("es.resource", indexName + "/mapper_type");
conf.put("es.query", query);
conf.put("es.scroll.size", "5000");
JavaPairRDD<Long, String> esRdd = JavaEsSpark.esRDD(jsc, conf)
.mapToPair(data -> {
String sceneTagsB = tagGroupId.toString();
if (null != data._2().get("sceneTags_b")) {
sceneTagsB = sceneTagsB + " " + data._2().get("sceneTags_b");
}
return Tuple2.apply((Long) data._2.get("id"), sceneTagsB);
});
if (null == searchRDD) {
searchRDD = esRdd;
} else {
searchRDD = searchRDD.union(esRdd);
}
}
}
}
if (null != searchRDD) {
JavaPairRDD<Long, String> groupRDD = searchRDD.repartition(100).reduceByKey((x, y) -> x + " " + y)
.mapPartitionsToPair(data -> {
List<Tuple2<Long, String>> list = new ArrayList();
while (data.hasNext()) {
Set<String> set = new HashSet();
Tuple2<Long, String> tp2 = data.next();
String[] tagGroups = tp2._2().split(" ");
for (String tagGroup : tagGroups) {
set.add(tagGroup);
}
JSONObject json = new JSONObject();
json.put("id", tp2._1());
json.put("sceneTags_b", Joiner.on(" ").join(set));
list.add(Tuple2.apply(tp2._1(), json.toString()));
}
return list.iterator();
});
updateIndex(groupRDD, indexName);
}
/**
JavaPairRDD<Long, String> searchRDD = null;
for (TagProcessEntity mixEntity : enterpriseTagEntry.getValue()) {
if (mixEntity.realTime == 3) {
Long tagGroupId = mixEntity.tagGroupId;
String query = EsRequestUtil.getIndexParam(enterpriseId, tagGroupId, this.isProduction);
if (StringUtils.isNotEmpty(query)) {
Map<String, String> conf = new HashMap();
conf.put("es.nodes", AppEnvUtil.ES_NODES);
conf.put("es.resource", indexName + "/mapper_type");
conf.put("es.query", query);
conf.put("es.scroll.size", "5000");
JavaPairRDD<Long, String> esRdd = JavaEsSpark.esRDD(jsc, conf)
.mapToPair(data -> {
String sceneTagsB = tagGroupId.toString();
if (null != data._2().get("sceneTags_b")) {
sceneTagsB = sceneTagsB + " " + data._2().get("sceneTags_b");
}
return Tuple2.apply((Long) data._2.get("id"), sceneTagsB);
});
if (null == searchRDD) {
searchRDD = esRdd;
} else {
searchRDD = searchRDD.union(esRdd);
}
}
}
}
if (null != searchRDD) {
JavaPairRDD<Long, String> groupRDD = searchRDD.repartition(100).reduceByKey((x, y) -> x + " " + y)
.mapPartitionsToPair(data -> {
List<Tuple2<Long, String>> list = new ArrayList();
while (data.hasNext()) {
Set<String> set = new HashSet();
Tuple2<Long, String> tp2 = data.next();
String[] tagGroups = tp2._2().split(" ");
for (String tagGroup : tagGroups) {
set.add(tagGroup);
}
JSONObject json = new JSONObject();
json.put("id", tp2._1());
json.put("sceneTags_b", Joiner.on(" ").join(set));
list.add(Tuple2.apply(tp2._1(), json.toString()));
}
return list.iterator();
});
updateIndex(groupRDD, indexName);
}
*/
}
}
......
package com.gic.spark.util;
import com.gic.spark.entity.table.TabDataActuallyPaidConfig;
import scala.Tuple2;
import java.util.HashMap;
import java.util.Map;
......@@ -13,46 +12,54 @@ import java.util.Map;
*/
public class CommonUtil {
public static Map<Integer,TabDataActuallyPaidConfig> dataActuallyPaidConfigMap=new HashMap();
public static Map<Integer, TabDataActuallyPaidConfig> dataActuallyPaidConfigMap = new HashMap();
/**
* 1:实付
* 0:关闭(应付)
*
* @param enterprise_Id
* @return
*/
public static Integer getConfigStatus(Integer enterprise_Id){
TabDataActuallyPaidConfig dataActuallyPaidConfig=dataActuallyPaidConfigMap.get(enterprise_Id);
if(null==dataActuallyPaidConfig||null==dataActuallyPaidConfig.getConfig_Status()){
public static Integer getConfigStatus(Integer enterprise_Id) {
TabDataActuallyPaidConfig dataActuallyPaidConfig = dataActuallyPaidConfigMap.get(enterprise_Id);
if (null == dataActuallyPaidConfig || null == dataActuallyPaidConfig.getConfig_Status()) {
return 0;
}else{
} else {
return dataActuallyPaidConfig.getConfig_Status();
}
}
public static int isEmptyInteger2int(Integer param,int defaultValue){
return null==param?defaultValue:param;
public static int isEmptyInteger2int(Integer param, int defaultValue) {
return null == param ? defaultValue : param;
}
public static int isEmptyInteger2int(Integer param){
return isEmptyInteger2int(param,0);
public static int isEmptyInteger2int(Integer param) {
return isEmptyInteger2int(param, 0);
}
public static long isEmptyLong2long(Long param,long defaultValue){
return null==param?defaultValue:param;
public static long isEmptyLong2long(Long param, long defaultValue) {
return null == param ? defaultValue : param;
}
public static long isEmptyLong2long(Long param){
return isEmptyLong2long(param,0l);
public static long isEmptyLong2long(Long param) {
return isEmptyLong2long(param, 0l);
}
public static float isEmptyFloat2float(Float param,float defaultValue){
return null==param?defaultValue:param;
public static float isEmptyFloat2float(Float param, float defaultValue) {
return null == param ? defaultValue : param;
}
public static float isEmptyFloat2float(Float param){
return isEmptyFloat2float(param,0f);
public static float isEmptyFloat2float(Float param) {
return isEmptyFloat2float(param, 0f);
}
public static double isEmptyDouble2double(Double param,double defaultValue){
return null==param?defaultValue:param;
public static double isEmptyDouble2double(Double param, double defaultValue) {
return null == param ? defaultValue : param;
}
public static double isEmptyDouble2double(Double param){
return isEmptyDouble2double(param,0d);
public static double isEmptyDouble2double(Double param) {
return isEmptyDouble2double(param, 0d);
}
}
......@@ -8,6 +8,7 @@ package com.gic.spark.util;
public class ConstantUtil {
public static final String TAB_ENTERPRISE_USER="tab_enterprise_user";
public static final String TAB_ENTERPRISE_USER_RELATION="tab_enterprise_user_relation";
public static final String TAB_COUPON_LOG="tab_coupon_log";
public static final String TAB_INTEGRAL_CU_CHANGE_LOG="tab_integral_cu_change_log";
public static final String ADS_GIC_TRD_ECU_SALES_LABEL_D="demoads.ads_gic_trd_ecu_sales_label_d";
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment