Project: wangxiaokang/gic-spark-tag-4.0

Commit f5b3e5fb, authored Aug 26, 2020 by guos
Parent: 1800bc4c

会员标签4.0 (Member Tag 4.0)

Showing 4 changed files with 38 additions and 37 deletions
+9  -9   src/main/java/com/gic/spark/filter/TagAssociatedPurchaseRateFilter.java
+9  -9   src/main/java/com/gic/spark/filter/TagAverageDiscountFactorFilter.java
+11 -10  src/main/java/com/gic/spark/filter/TagConsumptionSleepDaysFilter.java
+9  -9   src/main/java/com/gic/spark/filter/TagPerCustomerTransactionFilter.java
src/main/java/com/gic/spark/filter/TagAssociatedPurchaseRateFilter.java

@@ -6,6 +6,7 @@ import com.gic.spark.entity.bean.TrdEcuBrandLabelBean;
 import com.gic.spark.entity.request.AbstractFilterRequest;
 import com.gic.spark.entity.request.TagConsumeDoubleRequest;
 import com.gic.spark.entity.request.TagConsumeRequest;
+import com.gic.spark.util.CommonUtil;
 import org.apache.spark.api.java.JavaRDD;
 import scala.Tuple2;
@@ -40,18 +41,17 @@ public class TagAssociatedPurchaseRateFilter extends AbstractTagConsumFilter{
         TagConsumeDoubleRequest consumeRequest=(TagConsumeDoubleRequest)request;
         JavaRDD<TrdEcuBrandLabelBean> consumeRDD=MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEntId(enterpriseId),TrdEcuBrandLabelBean.class).javaRDD();
         consumeRDD=statisticsTypeHandle(consumeRDD,consumeRequest);
-        JavaRDD<Long> ecuRdd=consumeRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data)).groupByKey()
+        JavaRDD<Long> ecuRdd=consumeRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data))
+                .reduceByKey((x,y)->{
+                    x.setSeff_goods_num(x.getSeff_goods_num()+y.getSeff_goods_num());
+                    x.setOrder_times(x.getOrder_times()+y.getOrder_times());
+                    return x;
+                })
                 .mapPartitions(data->{
                     List<Long> result=new ArrayList();
                     while (data.hasNext()){
-                        Tuple2<Long,Iterable<TrdEcuBrandLabelBean>> tp2=data.next();
-                        long totalGodsNum=0;
-                        long totalOrderTimes=0;
-                        for(TrdEcuBrandLabelBean consumeBean:tp2._2()){
-                            totalGodsNum+=consumeBean.getSeff_goods_num();
-                            totalOrderTimes+=consumeBean.getOrder_times();
-                        }
-                        double jointRate=totalGodsNum/totalOrderTimes;
+                        Tuple2<Long,TrdEcuBrandLabelBean> tp2=data.next();
+                        double jointRate=CommonUtil.isEmptyInteger2int(tp2._2().getSeff_goods_num())/CommonUtil.isEmptyInteger2int(tp2._2().getOrder_times());
                         switch (consumeRequest.getNumberType()){
                             case gt:
                                 if(jointRate>consumeRequest.getBeginNum()){
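For reference, a minimal sketch of the pattern this hunk switches to: instead of groupByKey() followed by a per-member accumulation loop, rows are merged with reduceByKey() and the ratio is computed from the single merged record. The bean, field names and local Spark setup below are simplified stand-ins for the project's TrdEcuBrandLabelBean pipeline, not its real classes, and it assumes Spark 2.x on the classpath; the explicit cast to double and the zero-denominator guard are choices of this sketch (the long/long and int/int divisions in the code above drop the fractional part).

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;

public class JointRateSketch {

    // Simplified stand-in for TrdEcuBrandLabelBean: only the two fields this filter reads.
    public static class Row implements Serializable {
        long ecuId;          // member id
        int goodsNum;        // seff_goods_num
        int orderTimes;      // order_times
        Row(long ecuId, int goodsNum, int orderTimes) {
            this.ecuId = ecuId;
            this.goodsNum = goodsNum;
            this.orderTimes = orderTimes;
        }
    }

    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("joint-rate-sketch").setMaster("local[*]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            JavaRDD<Row> consumeRDD = sc.parallelize(Arrays.asList(
                    new Row(1L, 6, 2), new Row(1L, 3, 1), new Row(2L, 4, 4)));

            // Key by member id and merge rows with reduceByKey (map-side combine),
            // replacing the old groupByKey() + per-member loop.
            List<Tuple2<Long, Double>> jointRates = consumeRDD
                    .mapToPair(r -> new Tuple2<>(r.ecuId, r))
                    .reduceByKey((x, y) -> {
                        x.goodsNum += y.goodsNum;
                        x.orderTimes += y.orderTimes;
                        return x;
                    })
                    // Cast before dividing so the rate keeps its fractional part,
                    // and guard against members with zero orders.
                    .mapValues(r -> r.orderTimes == 0 ? 0.0 : (double) r.goodsNum / r.orderTimes)
                    .collect();

            jointRates.forEach(t -> System.out.println(t._1() + " -> " + t._2()));
        }
    }
}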
src/main/java/com/gic/spark/filter/TagAverageDiscountFactorFilter.java

@@ -6,6 +6,7 @@ import com.gic.spark.entity.bean.TrdEcuBrandLabelBean;
 import com.gic.spark.entity.request.AbstractFilterRequest;
 import com.gic.spark.entity.request.TagConsumeDoubleRequest;
 import com.gic.spark.entity.request.TagConsumeRequest;
+import com.gic.spark.util.CommonUtil;
 import org.apache.spark.api.java.JavaRDD;
 import scala.Tuple2;
@@ -42,18 +43,17 @@ public class TagAverageDiscountFactorFilter extends AbstractTagConsumFilter {
         TagConsumeDoubleRequest consumeRequest=(TagConsumeDoubleRequest)request;
         JavaRDD<TrdEcuBrandLabelBean> consumeRDD=MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEntId(enterpriseId),TrdEcuBrandLabelBean.class).javaRDD();
         consumeRDD=statisticsTypeHandle(consumeRDD,consumeRequest);
-        JavaRDD<Long> ecuRdd=consumeRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data)).groupByKey()
+        JavaRDD<Long> ecuRdd=consumeRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data))
+                .reduceByKey((x,y)->{
+                    x.setPay_amt(x.getPay_amt()+y.getPay_amt());
+                    x.setTotal_amt(x.getTotal_amt()+y.getTotal_amt());
+                    return x;
+                })
                 .mapPartitions(data->{
                     List<Long> result=new ArrayList();
                     while (data.hasNext()){
-                        double payAmt=0;
-                        double totalAmt=0;
-                        Tuple2<Long,Iterable<TrdEcuBrandLabelBean>> tp2=data.next();
-                        for(TrdEcuBrandLabelBean consumeBean:tp2._2()){
-                            payAmt=consumeBean.getPay_amt();
-                            totalAmt=consumeBean.getTotal_amt();
-                        }
-                        double avgDiscountRate=payAmt/totalAmt;
+                        Tuple2<Long,TrdEcuBrandLabelBean> tp2=data.next();
+                        double avgDiscountRate=CommonUtil.isEmptyDouble2double(tp2._2().getPay_amt())/CommonUtil.isEmptyDouble2double(tp2._2().getTotal_amt());
                         switch (consumeRequest.getNumberType()){
                             case gt:
                                 if(avgDiscountRate>consumeRequest.getBeginNum()){
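The new ratio line leans on CommonUtil.isEmptyDouble2double and isEmptyInteger2int from com.gic.spark.util, which this commit only imports. Their bodies are not part of the diff, so the version below is a guessed re-implementation of the usual null-safe unboxing (null maps to zero), plus a divide-by-zero guard that the sketch adds on its own; treat the names and behaviour as assumptions, not the project's actual utility.

// Hypothetical stand-ins for com.gic.spark.util.CommonUtil's converters; the real
// implementation is not shown in this commit, so null-to-zero is an assumption.
public final class NullSafeSketch {

    private NullSafeSketch() {}

    static double isEmptyDouble2double(Double value) {
        // A NULL pay_amt / total_amt column arrives as a null wrapper; map it to 0.0
        // so the division below cannot throw a NullPointerException while unboxing.
        return value == null ? 0.0 : value;
    }

    static int isEmptyInteger2int(Integer value) {
        return value == null ? 0 : value;
    }

    public static void main(String[] args) {
        Double payAmt = null;     // e.g. a row whose pay_amt was NULL in MySQL
        Double totalAmt = 200.0;

        double pay = isEmptyDouble2double(payAmt);
        double total = isEmptyDouble2double(totalAmt);

        // Extra guard added by this sketch: a zero denominator would otherwise
        // produce Infinity or NaN instead of a usable discount rate.
        double avgDiscountRate = total == 0 ? 0.0 : pay / total;
        System.out.println(avgDiscountRate);   // 0.0
    }
}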
src/main/java/com/gic/spark/filter/TagConsumptionSleepDaysFilter.java

@@ -41,47 +41,48 @@ public class TagConsumptionSleepDaysFilter extends AbstractTagConsumFilter{
         JavaRDD<TrdEcuBrandLabelBean> consumeRDD=MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEntId(enterpriseId),TrdEcuBrandLabelBean.class).javaRDD();
         consumeRDD=statisticsTypeHandle(consumeRDD,consumeRequest);
         JavaRDD<Long> ecuRdd=consumeRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data.getSleep_days()))
                 .reduceByKey((x,y)->x>y?y:x)
-                .mapPartitions(data->{
-                    List<Long> result=new ArrayList();
+                .mapPartitionsToPair(data->{
+                    List<Tuple2<Long,Integer>> result=new ArrayList();
                     while (data.hasNext()){
                         Tuple2<Long,Integer> tp2=data.next();
                         switch (consumeRequest.getNumberType()){
                             case gt:
                                 if(tp2._2()>consumeRequest.getBeginNum()){
-                                    result.add(tp2._1());
+                                    result.add(tp2);
                                 }
                                 break;
                             case gte:
                                 if(tp2._2()>=consumeRequest.getBeginNum()){
-                                    result.add(tp2._1());
+                                    result.add(tp2);
                                 }
                                 break;
                             case lt:
                                 if(tp2._2()<consumeRequest.getEndNum()){
-                                    result.add(tp2._1());
+                                    result.add(tp2);
                                 }
                                 break;
                             case lte:
                                 if(tp2._2()<=consumeRequest.getEndNum()){
-                                    result.add(tp2._1());
+                                    result.add(tp2);
                                 }
                                 break;
                             case eq:
                                 if(tp2._2()==consumeRequest.getEqualNum()){
-                                    result.add(tp2._1());
+                                    result.add(tp2);
                                 }
                                 break;
                             case between:
                                 if(tp2._2()>=consumeRequest.getBeginNum()&&tp2._2()<=consumeRequest.getEndNum()){
-                                    result.add(tp2._1());
+                                    result.add(tp2);
                                 }
                             default:
                                 break;
                         }
                     }
                     return result.iterator();
-                });
+                })
+                .reduceByKey((x,y)->x)
+                .map(data->data._1());
         return ecuRdd;
     }
 }
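This hunk reshapes the sleep-days pipeline: instead of collecting bare member ids inside mapPartitions, it emits (id, sleep_days) pairs with mapPartitionsToPair, collapses duplicate keys with reduceByKey((x,y)->x), and only then projects back to ids. A compact, self-contained sketch of that shape follows; the data is made up and a single gt threshold stands in for the request object, with Spark 2.x assumed on the classpath.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SleepDaysSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("sleep-days-sketch").setMaster("local[*]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // (member id, sleep days); member 1 appears twice with different values.
            List<Tuple2<Long, Integer>> input = Arrays.asList(
                    new Tuple2<>(1L, 30), new Tuple2<>(1L, 45), new Tuple2<>(2L, 10));
            JavaPairRDD<Long, Integer> rows = sc.parallelizePairs(input);

            final int beginNum = 20;   // stand-in for consumeRequest.getBeginNum()

            JavaRDD<Long> ecuRdd = rows
                    // keep the smallest sleep-days value per member, as the filter does
                    .reduceByKey((x, y) -> x > y ? y : x)
                    // filter inside each partition, emitting (id, days) pairs
                    .mapPartitionsToPair(data -> {
                        List<Tuple2<Long, Integer>> result = new ArrayList<>();
                        while (data.hasNext()) {
                            Tuple2<Long, Integer> tp2 = data.next();
                            if (tp2._2() > beginNum) {   // only the "gt" branch of the switch
                                result.add(tp2);
                            }
                        }
                        return result.iterator();
                    })
                    // drop any duplicate keys, then keep just the member ids
                    .reduceByKey((x, y) -> x)
                    .map(data -> data._1());

            System.out.println(ecuRdd.collect());   // [1]
        }
    }
}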
src/main/java/com/gic/spark/filter/TagPerCustomerTransactionFilter.java

@@ -5,6 +5,7 @@ import com.gic.spark.datasource.mysql.MysqlRddManager;
 import com.gic.spark.entity.bean.TrdEcuBrandLabelBean;
 import com.gic.spark.entity.request.AbstractFilterRequest;
 import com.gic.spark.entity.request.TagConsumeRequest;
+import com.gic.spark.util.CommonUtil;
 import org.apache.spark.api.java.JavaRDD;
 import scala.Tuple2;
@@ -39,18 +40,17 @@ public class TagPerCustomerTransactionFilter extends AbstractTagConsumFilter{
         TagConsumeRequest consumeRequest=(TagConsumeRequest)request;
         JavaRDD<TrdEcuBrandLabelBean> consumeRDD=MysqlRddManager.getPojoFromDataset(dataSourceHive.getDatasetByEntId(enterpriseId),TrdEcuBrandLabelBean.class).javaRDD();
         consumeRDD=statisticsTypeHandle(consumeRDD,consumeRequest);
-        JavaRDD<Long> ecuRdd=consumeRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data)).groupByKey()
+        JavaRDD<Long> ecuRdd=consumeRDD.mapToPair(data->Tuple2.apply(data.getEcu_id(),data))
+                .reduceByKey((x,y)->{
+                    x.setPay_amt(x.getPay_amt()+y.getPay_amt());
+                    x.setOrder_times(x.getOrder_times()+y.getOrder_times());
+                    return x;
+                })
                 .mapPartitions(data->{
                     List<Long> result=new ArrayList();
                     while (data.hasNext()){
-                        Tuple2<Long,Iterable<TrdEcuBrandLabelBean>> tp2=data.next();
-                        int consumeTimes=0;
-                        double payAmt=0;
-                        for(TrdEcuBrandLabelBean consumeBean:tp2._2()){
-                            consumeTimes+=consumeBean.getOrder_times();
-                            payAmt+=consumeBean.getPay_amt();
-                        }
-                        double CusSinglePiece=payAmt/consumeTimes;
+                        Tuple2<Long,TrdEcuBrandLabelBean> tp2=data.next();
+                        double CusSinglePiece=CommonUtil.isEmptyDouble2double(tp2._2().getPay_amt())/CommonUtil.isEmptyInteger2int(tp2._2().getOrder_times());
                         switch (consumeRequest.getNumberType()){
                             case gt:
                                 if(CusSinglePiece>consumeRequest.getBeginNum()){
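All four filters end in the same switch over the request's number type (gt, gte, lt, lte, eq, between, as visible in full in the sleep-days hunk). A small standalone sketch of that comparison is below; the NumberType enum and the begin/end/equal parameters are modelled on the request getters seen in these diffs (getNumberType, getBeginNum, getEndNum, getEqualNum) and are assumptions, not the project's real types.

// Hedged sketch: enum name and method shape are assumptions modelled on the
// TagConsumeRequest / TagConsumeDoubleRequest getters used in the diffs above.
public class NumberTypeSketch {

    enum NumberType { gt, gte, lt, lte, eq, between }

    static boolean matches(double value, NumberType type,
                           double beginNum, double endNum, double equalNum) {
        switch (type) {
            case gt:      return value > beginNum;
            case gte:     return value >= beginNum;
            case lt:      return value < endNum;
            case lte:     return value <= endNum;
            case eq:      return value == equalNum;   // exact equality, mirroring the source
            case between: return value >= beginNum && value <= endNum;
            default:      return false;
        }
    }

    public static void main(String[] args) {
        double cusSinglePiece = 120.0 / 3;   // payAmt / orderTimes, as computed above
        System.out.println(matches(cusSinglePiece, NumberType.gt, 30, 0, 0));        // true
        System.out.println(matches(cusSinglePiece, NumberType.between, 30, 50, 0));  // true
    }
}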