Commit e7679764 by fudahua

Merge branch 'feature-2023-04' into 'master'

Feature 2023 04

See merge request !94
parents 8330f895 fced8c62
......@@ -4,12 +4,12 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.gic</groupId>
<artifactId>gic-cloud-data-hook-api</artifactId>
<version>2.42</version>
<version>3.0-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>com.gic</groupId>
<artifactId>gic-cloud-web-service-api</artifactId>
<version>3.80</version>
<version>3.0-SNAPSHOT</version>
<scope>compile</scope>
<exclusions>
<exclusion>
......@@ -25,7 +25,7 @@
<dependency>
<groupId>com.gic</groupId>
<artifactId>gic-cloud-commons</artifactId>
<version>3.64</version>
<version>3.0-SNAPSHOT</version>
<scope>compile</scope>
<exclusions>
<exclusion>
......@@ -41,7 +41,7 @@
<dependency>
<groupId>com.gic</groupId>
<artifactId>gic-platform-config</artifactId>
<version>2.29</version>
<version>3.0-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
......
......@@ -8,6 +8,9 @@ public class DownloadTaskStatus {
/** 排队中 */
public static final String WAITING = "waiting";
/** hdfs已经处理完等待下载 */
public static final String DOWNLOAD_HDFS = "downloading";
/** 生成中 */
public static final String BUILDING = "building";
......
......@@ -69,4 +69,6 @@ public interface IDownloadTaskService {
DownloadProcessDTO getDownloadProcess();
void checkTaskStatus(String param);
public List<DownloadTask> getDownloadTaskOfBuilding(String queryDataSource);
}
......@@ -11,6 +11,12 @@ import java.util.List;
*/
public interface IFlatQueryResultService {
/**
* mq处理
* @param params
*/
public void dealFileMq(String params) ;
/** 自助指标查询
* @param tableId 指定的表明
* @param enterpriseIds 要查询的企业编号集合
......
......@@ -146,6 +146,16 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoopCommonVersion}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoopCommonVersion}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoopCommonVersion}</version>
<exclusions>
......@@ -188,7 +198,11 @@
<artifactId>gic-enterprise-api</artifactId>
<version>${gic-enterprise-api}</version>
</dependency>
<dependency>
<groupId>com.gic</groupId>
<artifactId>gic-redis-data</artifactId>
<version>${gic-redis-data}</version>
</dependency>
</dependencies>
<build>
......@@ -199,8 +213,8 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.7</source>
<target>1.7</target>
<source>1.8</source>
<target>1.8</target>
<encoding>UTF-8</encoding>
<compilerArguments>
<verbose/>
......
......@@ -3,6 +3,7 @@ package com.gic.cloud.data.hook.service;
import com.alibaba.dubbo.common.utils.StringUtils;
import com.ctrip.framework.apollo.Config;
import com.ctrip.framework.apollo.ConfigService;
import com.gic.cloud.data.hook.api.dto.QueryDataType;
import com.gic.cloud.data.hook.api.entity.FlatQueryCondition;
import com.gic.cloud.data.hook.service.impl.FreeQueryServiceImpl;
import com.gic.data.shield.SdkEnv;
......@@ -102,4 +103,26 @@ public class DecryptUtils {
}
}
/**
* 脱敏
* @param queryDataType
* @param columnVal
* @param condition
* @return
*/
public static String encryptionDeal(int queryDataType,String columnVal,FlatQueryCondition condition){
if(queryDataType == QueryDataType.FULL){
return columnVal;
} else {
if(condition.getEnableEncrypt()) {
return "******";
}else if (FilterFieldUtils.FILETERS_USER_NAME.contains(condition.getFieldName())) {
return DecryptUtils.dataSecurityProcessUserName(columnVal);
} else {
return columnVal;
}
}
}
}
package com.gic.cloud.data.hook.service;

import com.gic.cloud.data.hook.api.entity.FlatQueryCondition;
import com.gic.cloud.data.hook.service.entity.ColumnInfo;

import java.util.List;

/**
 * Row callback invoked while streaming a download result set.
 *
 * <p>Single abstract method, so it can be implemented with a lambda.</p>
 */
@FunctionalInterface
public interface DownloadFunc {

    /**
     * Processes one row of the streamed result.
     *
     * @param cells     raw column values of the current row
     * @param titles    flat-query conditions describing the columns — NOTE(review):
     *                  presumably index-aligned with {@code cells}; confirm at call sites
     * @param fileFirst whether this row is the first one written to the output file
     *                  — NOTE(review): inferred from the name; confirm
     */
    void deal(String[] cells, List<FlatQueryCondition> titles, boolean fileFirst);
}
package com.gic.cloud.data.hook.service;

import com.ctrip.framework.apollo.Config;
import com.ctrip.framework.apollo.ConfigService;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URI;

/**
 * Lazily-initialized singleton wrapper around a Hadoop {@link FileSystem},
 * configured from the Apollo property {@code hdfs.url}.
 */
public class HDFSUtil {

    private static final Logger logger = LoggerFactory.getLogger(HDFSUtil.class);

    // volatile is required for double-checked locking to be safe (JMM)
    private static volatile HDFSUtil hdfsUtil = null;
    private static FileSystem fileSystem = null;

    /**
     * Returns the process-wide instance, creating it on first use.
     *
     * @return the shared {@link HDFSUtil}
     */
    public static HDFSUtil getInstance() {
        if (hdfsUtil == null) {
            synchronized (HDFSUtil.class) {
                if (hdfsUtil == null) {
                    hdfsUtil = new HDFSUtil();
                }
            }
        }
        return hdfsUtil;
    }

    private HDFSUtil() {
        Config appConfig = ConfigService.getAppConfig();
        String hdfsUrl = appConfig.getProperty("hdfs.url", null);
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", hdfsUrl);
        try {
            // fixed user "hadoop" — NOTE(review): confirm this matches cluster ACLs
            fileSystem = FileSystem.get(new URI(hdfsUrl), configuration, "hadoop");
        } catch (Exception e) {
            // error level + cause as last arg so the full stack trace is logged
            // (the old info("...{}", e) formatted the exception without a trace)
            logger.error("hdfs初始化失败", e);
        }
    }

    /**
     * Copies an HDFS file to the local filesystem.
     *
     * <p>NOTE: {@code delSrc=true} — the source file is REMOVED from HDFS on
     * success; this mirrors the original behavior.</p>
     *
     * @param srcPath HDFS source path
     * @param toPath  local destination path
     * @return {@code true} when the copy succeeded, {@code false} otherwise
     */
    public boolean downloadFile(String srcPath, String toPath) {
        if (fileSystem == null) {
            // initialization failed in the constructor; fail fast instead of NPE
            logger.error("hdfs未初始化,无法下载: {}", srcPath);
            return false;
        }
        try {
            fileSystem.copyToLocalFile(true, new Path(srcPath), new Path(toPath));
            return true;
        } catch (IOException e) {
            logger.error("下载失败: " + srcPath, e);
            return false;
        }
    }
}
......@@ -62,8 +62,23 @@ public interface DownloadTaskDao {
*/
public List<DownloadTask> getDownloadTaskOfWaiting(@Param("queryDataSource") String queryDataSource);
/** 获取等待申请通过状态的任务
* @return
*/
public List<DownloadTask> getDownloadTaskOfHasDownload(@Param("queryDataSource") String queryDataSource, @Param("num") int num);
DownloadProcessDTO getDownloadProcess();
int updateTaskStatusError(@Param("idList") List<String> idList);
/** 获取等待申请通过状态的任务
* @return
*/
public int getCountDownloadTaskOfBuilding(@Param("queryDataSource") String queryDataSource);
/** 获取等待申请通过状态的任务
* @return
*/
public List<DownloadTask> getDownloadTaskOfBuilding(@Param("queryDataSource") String queryDataSource);
}
package com.gic.cloud.data.hook.service.entity;
import java.io.Serializable;
public class ColumnInfo implements Serializable {
private String type;
private String title;
public ColumnInfo() {
}
public ColumnInfo(String type, String title) {
this.type = type;
this.title = title;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
}
......@@ -14,6 +14,7 @@ import com.gic.cloud.data.hook.service.MysqlHelper;
import com.gic.cloud.data.hook.service.dao.DownloadRecordDao;
import com.gic.cloud.data.hook.service.dao.DownloadTaskDao;
import com.gic.cloud.data.hook.service.dao.RiskModeRecordDao;
import com.gic.thirdparty.cloudfile.CloudFileUtil;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.opencsv.CSVWriter;
......@@ -104,6 +105,10 @@ public class DownloadTaskServiceImpl implements IDownloadTaskService {
if (CollectionUtils.isNotEmpty(result.getList())) {
for (DownloadTask downloadTask : result.getList()) {
//下载中也是等待中 用于前端展示
if (downloadTask.getStatus().equals(DownloadTaskStatus.DOWNLOAD_HDFS)){
downloadTask.setStatus(DownloadTaskStatus.WAITING);
}
if (StringUtils.isBlank(downloadTask.getFilePath())) {
continue;
}
......@@ -159,6 +164,7 @@ public class DownloadTaskServiceImpl implements IDownloadTaskService {
}
}
this.downloadTaskDao.deleteDownloadTask(task.getId());
CloudFileUtil.delFileByUrl(task.getFilePath());
return true;
} else return false;
}
......@@ -228,6 +234,27 @@ public class DownloadTaskServiceImpl implements IDownloadTaskService {
return downloadTaskDao.getDownloadTaskOfWaiting(queryDataSource);
}
/** 获取在审核申请等待状态中的任务
* @return
*/
public List<DownloadTask> getDownloadTaskOfHasDownload(String queryDataSource,int num) {
return downloadTaskDao.getDownloadTaskOfHasDownload(queryDataSource, num);
}
/** 获取在审核申请等待状态中的任务
* @return
*/
public int getCountDownloadTaskOfBuilding(String queryDataSource) {
return downloadTaskDao.getCountDownloadTaskOfBuilding(queryDataSource);
}
/** 获取在审核申请等待状态中的任务
* @return
*/
public List<DownloadTask> getDownloadTaskOfBuilding(String queryDataSource) {
return downloadTaskDao.getDownloadTaskOfBuilding(queryDataSource);
}
/** 获取指定申请编号的风险模式记录
* @param applyId
......
......@@ -610,7 +610,7 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
private String cloudFileUpload(String enterpriseId,File file,String fileExt){
EnterpriseDTO enterprise = enterpriseService.getEnterpriseById(enterpriseId);
CloudFileInfo zip = CloudFileUtil.uploadFile(file, fileExt, CloudFileTypeEnum.FILE, enterprise.getFactoryCode(), CloudFileBusinessOptEnum.HOOK_COMMON);
CloudFileInfo zip = CloudFileUtil.uploadTempFile(file, fileExt, CloudFileTypeEnum.FILE, enterprise.getFactoryCode(), CloudFileBusinessOptEnum.HOOK_COMMON,2L);
FileRecordLogUtil.recordUseLog("FreeQueryServiceImpl#cloudFileUpload||"+fileExt);
return zip.getOrgFileUrl();
}
......
......@@ -21,6 +21,7 @@
<!--<import resource="classpath:data-hook-service-dubbo-config.xml"/>-->
<!--<import resource="classpath:data-hook-flat-query-source.xml"/>-->
<!--<import resource="classpath:data-hook-free-query-source.xml"/>-->
<import resource="classpath*:redis-init.xml" />
<import resource="classpath*:data-hook-service-druid-prod.xml"/>
<import resource="classpath:data-hook-init.xml"/>
<import resource="classpath*:dubbo-setting.xml"/>
......
......@@ -30,7 +30,8 @@
q.apply_status,
q.real_sql,
q.enterprise_id,
q.report_id
q.report_id,
q.download_condition
</sql>
......@@ -187,6 +188,19 @@
AND q.del_flag = '0'
</select>
<select id="getDownloadTaskOfHasDownload" resultType="DownloadTask">
SELECT *
FROM
<include refid="queryTables"/>
<include refid="queryJoins"/>
WHERE
q.query_data_source = #{queryDataSource}
AND q.status = "downloading"
AND q.apply_permitted = 1
AND q.del_flag = '0'
order by start_time asc limit ${num}
</select>
<select id="getDownloadProcess" resultType="DownloadProcessDTO">
SELECT sum(amount) dataAmount,count(1) taskAmount
FROM
......@@ -207,4 +221,27 @@
</foreach>
</update>
<select id="getCountDownloadTaskOfBuilding" resultType="int">
SELECT count(*)
FROM
<include refid="queryTables"/>
<include refid="queryJoins"/>
WHERE
q.query_data_source = #{queryDataSource}
AND q.status = "building"
AND q.del_flag = '0'
</select>
<select id="getDownloadTaskOfBuilding" resultType="DownloadTask">
SELECT *
FROM
<include refid="queryTables"/>
<include refid="queryJoins"/>
WHERE
q.query_data_source = #{queryDataSource}
AND q.status = "building"
AND q.del_flag = '0'
order by start_time asc
</select>
</mapper>
\ No newline at end of file
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import java.io.FileOutputStream;
import java.util.Date;
/**
 * Scratch test: prints the ratio 25/100 as a double (0.25).
 */
public class ExceTest2 {

    public static void main(String[] args) {
        int numerator = 25;
        int denominator = 100;
        // cast before dividing — plain int division would yield 0
        Double ratio = Double.valueOf((double) numerator / denominator);
        System.out.println(ratio);
    }
}
......@@ -21,10 +21,6 @@ public class ExcelTest {
DataFormat format = wb.createDataFormat();
cellStyle.setDataFormat(format.getFormat("yyyy-MM-dd HH:mm:ss"));
// CellStyle cellStyle = wb.createCellStyle();
// DataFormat format = wb.createDataFormat();
// cellStyle.setDataFormat(format.getFormat("yyyy-MM-dd HH:mm:ss"));
int n=0;
do {
Row row = sheet.createRow(n);
......
import com.alibaba.fastjson.JSON;
import com.gic.cloud.data.hook.api.dto.DownloadTask;
import com.gic.cloud.data.hook.api.entity.FlatQueryTaskCondition;
import com.gic.cloud.data.hook.service.impl.DownloadTaskServiceImpl;
import com.gic.cloud.data.hook.service.impl.FlatQueryResultServiceImpl;
import com.gic.redis.data.util.RedisUtil;
import org.junit.runner.RunWith;
import org.redisson.api.RedissonClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath*:applicationContext-conf.xml"})
public class Test {
// services under test, wired from the spring context
@Autowired
private FlatQueryResultServiceImpl flatQueryResultService;
@Autowired
private DownloadTaskServiceImpl downloadTaskService;
// Manual test: loads a task by hard-coded id and runs the file-take flow,
// then sleeps so async work can finish — not a real assertion-based test.
@org.junit.Test
public void test(){
DownloadTask downloadTask = downloadTaskService.getDownloadTaskById("1680844420436");
flatQueryResultService.takeFileNew(downloadTask);
try {
Thread.sleep(10000000000L);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
// Manual test: boots the redis context and prints the shared Redisson client.
@org.junit.Test
public void test3(){
ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("redis-init.xml");
RedissonClient redisClient = RedisUtil.getRedisClient();
System.out.println(redisClient);
}
// Manual test: deserializes a captured task condition and prints the SQL
// produced by buildFlatQuerySQL for visual inspection.
@org.junit.Test
public void test2(){
String json="{\"allFields\":[\"is_mbr\",\"main_store_name\",\"main_store_code\",\"card_num\",\"open_card_time\",\"clerk_name\",\"add_clerk_code\",\"clerk_store_name\",\"clerk_store_code\",\"add_buddy_time\",\"create_time\"],\"amount\":1169963,\"authStoreIdList\":[],\"buildPermitted\":\"1\",\"conditions\":[{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"是否会员\",\"fieldName\":\"is_mbr\",\"fieldType\":\"text\",\"filterTunnel\":\"\",\"mainFilter\":\"是\"},{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"会员服务门店名称\",\"fieldName\":\"main_store_name\",\"fieldType\":\"text\",\"filterTunnel\":\"\",\"mainFilter\":\"\"},{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"会员服务门店code\",\"fieldName\":\"main_store_code\",\"fieldType\":\"text\",\"filterTunnel\":\"\",\"mainFilter\":\"\"},{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"会员卡号\",\"fieldName\":\"card_num\",\"fieldType\":\"text\",\"filterTunnel\":\"\",\"mainFilter\":\"\"},{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"开卡时间\",\"fieldName\":\"open_card_time\",\"fieldType\":\"date\",\"filterTunnel\":\"\",\"mainFilter\":\"\"},{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"好友导购姓名\",\"fieldName\":\"clerk_name\",\"fieldType\":\"text\",\"filterTunnel\":\"\",\"mainFilter\":\"\"},{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"好友导购code\",\"fieldName\":\"add_clerk_code\",\"fieldType\":\"text\",\"filterTunnel\":\"\",\"mainFilter\":\"\"},{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"导购门店名称\",\"fieldName\":\"clerk_store_name\",\"fieldType\":\"text\",\"filterTunnel\":\"\",\"mainFilter\":\"\"},{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"好友导购所属门店code\",\"fieldName\":\"clerk_store_code\",\"fieldType\":\"text\",\"filterTunnel\":\"\",\"mainFilter\":\"\"},{\"enableEncrypt\":false,\"extendFilter\":\"2023-02-28 
23:59:59\",\"fieldMark\":\"添加好友时间\",\"fieldName\":\"add_buddy_time\",\"fieldType\":\"date\",\"filterTunnel\":\"\",\"mainFilter\":\"2010-03-01 00:00:00\"},{\"enableEncrypt\":false,\"extendFilter\":\"\",\"fieldMark\":\"好办导购与客户建立好友关系时间\",\"fieldName\":\"create_time\",\"fieldType\":\"date\",\"filterTunnel\":\"\",\"mainFilter\":\"\"}],\"decryptFilters\":[],\"enterpriseIds\":[\"ff80808180b3c54a0180bc3df3bb4bca\"],\"execDistinct\":false,\"orderDir\":\"\",\"orderField\":\"\",\"queryDataType\":0,\"tableId\":\"extract_entwechat_detail_d\",\"taskId\":\"1678674610424\"}";
FlatQueryTaskCondition condition = JSON.parseObject(json, FlatQueryTaskCondition.class);
String fullQuery = flatQueryResultService.buildFlatQuerySQL(
false, // download purpose
condition.getTableId(),
condition.getEnterpriseIds(),
condition.getConditions(),
condition.getOrderField(),
condition.getOrderDir(),
condition.getExecDistinct(),
0,
condition.getAuthStoreIdList());
System.out.println(fullQuery);
}
}
package com.gic;
import com.gic.redis.data.util.RedisUtil;
import org.junit.Test;
import org.redisson.api.RMap;
import org.redisson.api.RedissonClient;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
 * Manual redis connectivity check: boots the redis spring context and writes
 * a single entry into the "aaa123" map via the shared Redisson client.
 */
public class Test2 {

    @Test
    public void test() {
        // loading the context initializes RedisUtil's client as a side effect
        ClassPathXmlApplicationContext redisContext =
                new ClassPathXmlApplicationContext("redis-init.xml");
        RedissonClient client = RedisUtil.getRedisClient();
        RMap<Object, Object> sampleMap = client.getMap("aaa123");
        sampleMap.put("aa", "2");
    }
}
import com.gic.cloud.data.hook.service.HDFSUtil;
import com.opencsv.CSVReader;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.FileReader;
import java.io.IOException;
import java.util.Arrays;
/**
 * Manual smoke test: pulls one file from HDFS down to a local directory via
 * {@link HDFSUtil}. (Previous commented-out experiments removed.)
 */
public class hdfs {

    public static void main(String[] args) {
        // copies the HDFS file to the local path; per HDFSUtil the source is
        // deleted from HDFS on success
        HDFSUtil.getInstance().downloadFile("/data/hook/hdfs1680836264869", "D:\\testorder");
    }
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment