Commit 1843085a by fudahua

Merge branch 'developer' into 'master'

Developer

See merge request !48
parents 6fabc7f7 d0077ff7
......@@ -31,6 +31,25 @@
<dependencies>
<dependency>
<groupId>com.gic</groupId>
<artifactId>gic-commons</artifactId>
<version>${gic-commons}</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId>
<version>3.3</version>
......@@ -78,6 +97,11 @@
-->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
<version>2.7</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.8.2</version>
</dependency>
......
package com.gic.cloud.data.hook.service;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSONObject;
import com.ctrip.framework.apollo.Config;
import com.ctrip.framework.apollo.ConfigService;
import com.gic.cloud.data.hook.api.entity.FlatQueryCondition;
import com.gic.cloud.data.hook.api.entity.FlatQueryFieldType;
import com.gic.cloud.data.hook.service.entity.TimeCheckPojo;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.*;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
@Component
public class FilterFieldUtils {
private static Logger logger= LoggerFactory.getLogger(FilterFieldUtils.class);
/** 脱敏字段 */
public static List<String> FILTERS_PHONE_ONLY = Arrays.asList("mobile", "phone", "enterprise_name", "phone_number", "receive_phone_number", "use_phone_number", "friend_phone_num","from_phone_num");
......@@ -58,4 +68,68 @@ public class FilterFieldUtils {
}
}, 5, 5, TimeUnit.SECONDS);
}
private static ThreadLocal<Map<String,TimeCheckPojo>> TIMECHECK_THREADLOCAL=new ThreadLocal<>();
/**
 * Initializes the per-thread time-range check from the query's filter conditions.
 * Only DATE-typed conditions with a non-blank main filter are registered; the
 * parsed range is stored in a ThreadLocal map keyed by field name, to be read
 * later by {@code checkTime} on the same worker thread.
 *
 * @param queryConditions the flat-query filter conditions (may be null or empty)
 */
public static void timeFieldCheckInit(List<FlatQueryCondition> queryConditions) {
    // Clear any state left over from a previous task on this (pooled) thread.
    TIMECHECK_THREADLOCAL.remove();
    if (CollectionUtils.isEmpty(queryConditions)) {
        return;
    }
    Map<String, TimeCheckPojo> retMap = new HashMap<>();
    for (FlatQueryCondition queryCondition : queryConditions) {
        // A non-blank main filter means the filter is active.
        // Constant-first equals() avoids an NPE when getFieldType() is null.
        if (StringUtils.isNotBlank(queryCondition.getMainFilter())
                && FlatQueryFieldType.DATE.equals(queryCondition.getFieldType())) {
            // NOTE(review): getExtendFilter() may be null, which yields the
            // literal string "null" here and will make DateUtil.parse throw —
            // preserved as-is; confirm extend filter is always set for DATE.
            String startDate = queryCondition.getMainFilter() + "";
            String overDate = queryCondition.getExtendFilter() + "";
            DateTime start = DateUtil.parse(startDate);
            DateTime end = DateUtil.parse(overDate);
            TimeCheckPojo timeCheckPojo = new TimeCheckPojo();
            timeCheckPojo.setStartStr(startDate);
            timeCheckPojo.setEndStr(overDate);
            timeCheckPojo.setStartDate(start);
            timeCheckPojo.setEndDate(end);
            retMap.put(queryCondition.getFieldName(), timeCheckPojo);
        }
    }
    logger.info("开始----:{}", JSONObject.toJSONString(retMap));
    TIMECHECK_THREADLOCAL.set(retMap);
}
/**
 * Verifies that a formatted time value falls inside the range registered for
 * the given field; out-of-range values are only logged (no exception thrown).
 * A no-op when no range was registered for this thread or this field.
 *
 * @param key    field name under which the range was registered
 * @param time   formatted time string to validate
 * @param orgObj original (unformatted) value, included in the log for diagnosis
 */
public static void checkTime(String key, String time, Object orgObj) {
    Map<String, TimeCheckPojo> checkPojoMap = TIMECHECK_THREADLOCAL.get();
    if (checkPojoMap == null) {
        return;
    }
    TimeCheckPojo timeCheckPojo = checkPojoMap.get(key);
    if (timeCheckPojo == null) {
        return;
    }
    DateTime parse = DateUtil.parse(time);
    // Inclusive range check: values exactly equal to the start or end boundary
    // are valid. (The previous isAfter/isBefore pair was strictly exclusive and
    // wrongly flagged exact boundary matches as out of range.)
    if (!parse.isBefore(timeCheckPojo.getStartDate()) && !parse.isAfter(timeCheckPojo.getEndDate())) {
        return;
    }
    logger.info("key:{},time:{}---{},,orgTime:{}", key, time, orgObj, JSONObject.toJSON(timeCheckPojo));
}
/**
 * Clears the per-thread time-check state. Must be called when the download
 * task finishes to avoid ThreadLocal leakage on pooled worker threads.
 */
public static void removeTimeCheck(){
logger.info("结束----");
TIMECHECK_THREADLOCAL.remove();
}
}
......@@ -201,7 +201,9 @@ public class CsvResultSetHelper implements ResultSetHelper {
int columnType = resultSet.getMetaData().getColumnType(i + 1);
switch (columnType) {
case Types.TIMESTAMP:
result.add(resultSet.getTimestamp(i+1) != null ? datetimeFormatter_threadLocal.get().format(resultSet.getTimestamp(i+1)) : "");
String time = resultSet.getTimestamp(i+1) != null ? datetimeFormatter_threadLocal.get().format(resultSet.getTimestamp(i+1)) : "";
result.add(time);
// FilterFieldUtils.checkTime(columnName,time,resultSet.getObject(i+1));
break;
case Types.DATE:
//result[i] = resultSet.getTimestamp(columnName) != null ? dateFormatter.format(resultSet.getTimestamp(columnName)) : "";
......@@ -216,6 +218,10 @@ public class CsvResultSetHelper implements ResultSetHelper {
if(s2.contains("E0") || s2.startsWith("0")){
s2 = s2 + "\t";
}
s2=s2.replace("\r","");
s2=s2.replace("\n","");
s2=s2.replace(",",",");
s2=s2.replace("\"","");
}
result.add(s2);
break;
......
package com.gic.cloud.data.hook.service.entity;
import cn.hutool.core.date.DateTime;
import java.io.Serializable;
/**
 * Holds one parsed time range (start/end, both as the raw filter string and the
 * parsed {@link DateTime}) used to validate exported timestamp values against
 * the original query filter.
 */
public class TimeCheckPojo implements Serializable {

    // Explicit serialVersionUID keeps the serialized form stable across edits
    // (Serializable classes without one get a compiler-derived, fragile UID).
    private static final long serialVersionUID = 1L;

    /** Raw start-filter string exactly as supplied by the query condition. */
    private String startStr;
    /** Raw end-filter string exactly as supplied by the query condition. */
    private String endStr;
    /** Parsed start of the allowed range. */
    private DateTime startDate;
    /** Parsed end of the allowed range. */
    private DateTime endDate;

    public String getStartStr() {
        return startStr;
    }

    public void setStartStr(String startStr) {
        this.startStr = startStr;
    }

    public String getEndStr() {
        return endStr;
    }

    public void setEndStr(String endStr) {
        this.endStr = endStr;
    }

    public DateTime getStartDate() {
        return startDate;
    }

    public void setStartDate(DateTime startDate) {
        this.startDate = startDate;
    }

    public DateTime getEndDate() {
        return endDate;
    }

    public void setEndDate(DateTime endDate) {
        this.endDate = endDate;
    }
}
......@@ -2,6 +2,7 @@ package com.gic.cloud.data.hook.service.impl;
import cn.medubi.client.utils.LogPak;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ctrip.framework.apollo.Config;
import com.ctrip.framework.apollo.ConfigService;
import com.gic.cloud.common.api.base.Page;
......@@ -15,6 +16,7 @@ import com.gic.cloud.data.hook.service.dao.FlatQueryTableDao;
import com.gic.cloud.data.hook.service.entity.CsvDataFilterMode;
import com.gic.cloud.data.hook.service.entity.CsvResultSetHelper;
import com.gic.dubbo.entity.ProviderLocalTag;
import com.gic.qcloud.BucketNameEnum;
import com.gic.qcloud.FileUploadUtil;
import com.google.common.collect.Lists;
......@@ -30,6 +32,8 @@ import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.datanucleus.store.rdbms.datasource.dbcp.SQLNestedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
......@@ -56,6 +60,8 @@ import java.util.zip.ZipOutputStream;
*/
public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
private static Logger logger= LoggerFactory.getLogger(FlatQueryResultServiceImpl.class);
/** 日志类 */
private static LogPak log = new LogPak(FlatQueryResultServiceImpl.class);
......@@ -68,6 +74,8 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
private static final Integer maxFields = 20;
private static final Integer SMALL_SIZE = 2;
private static final Map<String, String> bigTaskRunningMap = new ConcurrentHashMap<>();
@Autowired
......@@ -80,9 +88,11 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
/** 自助指标查询关联的下载条件列表 */
protected List<FlatQueryTaskCondition> taskConditions = Lists.newArrayList();
protected List<FlatQueryTaskCondition> bigTaskConditions = Lists.newArrayList();
protected List<FlatQueryTaskCondition> smallConditions = Lists.newArrayList();
private FlatQueryResultServiceImpl() {
log.debug("construct", "准备初始化 FlatQuery 查询服务");
runDistTask(3);
runDownloadTask(3);
runBalaDownloadTask(3);
runBigDataDownloadTask(3);
......@@ -552,43 +562,89 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
private static SimpleDateFormat timeFormatter = new SimpleDateFormat("HH:mm:ss");
/** 下载任务执行计时器 */
/** 任务分配 */
//private Timer downloadTaskTimer = new Timer();
ScheduledExecutorService downloadService = new ScheduledThreadPoolExecutor(1,
new BasicThreadFactory.Builder().namingPattern("downloadTimer-%d").daemon(true).build());
ScheduledExecutorService distService = new ScheduledThreadPoolExecutor(1,
new BasicThreadFactory.Builder().namingPattern("distTimer-%d").daemon(true).build());
/** 启动自助指标查询计划任务 */
private void runDownloadTask(Integer interval) {
downloadService.scheduleAtFixedRate(new Runnable() {
/** 启动分配任务任务 */
private void runDistTask(Integer interval) {
distService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
Connection connection = null;
ProviderLocalTag providerLocalTag = ProviderLocalTag.tag.get();
providerLocalTag.traceId = UUID.randomUUID().toString();
try{
if (taskConditions != null && taskConditions.size() > 0) {
FlatQueryTaskCondition condition = null;
//FlatQueryTaskCondition condition = taskConditions.remove(0); // 移除并获取第一个任务条件
logger.info("总数:{}",taskConditions.size());
for (int i=0; i<taskConditions.size(); i++ ) {
log.debug("自助指标当前正在执行的任务为:", JSON.toJSONString(taskConditions.get(i)));
logger.info("[ 自助指标当前正在执行的任务为:]:{}", JSON.toJSONString(taskConditions.get(i)));
if (taskConditions.get(i).getBuildPermitted().equals(Global.YES)
&& !taskConditions.get(i).getEnterpriseIds().contains("ff8080816dd0385e016ddca436d01fe1")) {
if(taskConditions.get(i).getAllFields().size() >= maxFields) {
logger.info("加入相应队列-大数据队列,字段多:{}", JSONObject.toJSONString(taskConditions.get(i)));
bigTaskConditions.add(taskConditions.get(i));
taskConditions.remove(i);
continue;
} else {
if(bigTaskRunningMap.isEmpty()){
if(bigTaskRunningMap.isEmpty()&&CollectionUtils.isEmpty(bigTaskConditions)){
logger.info("加入相应队列-大数据队列:{}", JSONObject.toJSONString(taskConditions.get(i)));
bigTaskConditions.add(taskConditions.get(i));
taskConditions.remove(i);
continue;
}
}
//push进小队列
if (CollectionUtils.isEmpty(smallConditions)||smallConditions.size()<SMALL_SIZE) {
logger.info("加入相应队列-小数据队列:{}", JSONObject.toJSONString(taskConditions.get(i)));
smallConditions.add(taskConditions.get(i));
taskConditions.remove(i);
}else{
logger.info("没有相应add");
}
} // IF OVER
} // FOR OVER
} // 没有任务则忽略
}catch (Exception e){
logger.info("异常:{}",e);
logger.info("[ 自助指标下载异常 ]: {}", e.getMessage());
e.printStackTrace();
} finally {
}
}
}, interval*1000, interval*1000, TimeUnit.MILLISECONDS);
}
/** 下载任务执行计时器 */
//private Timer downloadTaskTimer = new Timer();
ScheduledExecutorService downloadService = new ScheduledThreadPoolExecutor(1,
new BasicThreadFactory.Builder().namingPattern("downloadTimer-%d").daemon(true).build());
/** 启动自助指标查询计划任务 */
private void runDownloadTask(Integer interval) {
downloadService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
ProviderLocalTag providerLocalTag = ProviderLocalTag.tag.get();
providerLocalTag.traceId = UUID.randomUUID().toString();
Connection connection = null;
try{
if (smallConditions != null && smallConditions.size() > 0) {
FlatQueryTaskCondition condition = null;
//FlatQueryTaskCondition condition = taskConditions.remove(0); // 移除并获取第一个任务条件
for (int i=0; i<smallConditions.size(); i++ ) {
logger.info("[ 自助指标当前正在执行的任务为:]:{}", JSON.toJSONString(smallConditions.get(i)));
if (smallConditions.get(i).getBuildPermitted().equals(Global.YES)
&& !smallConditions.get(i).getEnterpriseIds().contains("ff8080816dd0385e016ddca436d01fe1")) {
try {
connection = HiveHelper.getDownloadHiveConnection();
condition = taskConditions.remove(i); // 移除并获取第一个任务条件
condition = smallConditions.remove(i); // 移除并获取第一个任务条件
break;
}catch (Exception e){
log.debug("获取连接异常:", e.getMessage());
logger.info("[ 获取连接异常: ]:{}", e.getMessage());
e.printStackTrace();
continue;
}
......@@ -597,7 +653,8 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
takeFile(condition, connection);
} // 没有任务则忽略
}catch (Exception e){
log.debug("自助指标下载异常", e.getMessage());
logger.info("异常:{}",e);
logger.info("[ 自助指标下载异常 ]: {}", e.getMessage());
e.printStackTrace();
} finally {
if(connection != null){
......@@ -622,13 +679,15 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
bigDataDownloadService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
ProviderLocalTag providerLocalTag = ProviderLocalTag.tag.get();
providerLocalTag.traceId = UUID.randomUUID().toString();
Connection connection = null;
try{
if (bigTaskConditions != null && bigTaskConditions.size() > 0) {
FlatQueryTaskCondition condition = null;
//FlatQueryTaskCondition condition = taskConditions.remove(0); // 移除并获取第一个任务条件
for (int i=0; i<bigTaskConditions.size(); i++ ) {
log.debug("自助指标当前正在执行的任务为:", JSON.toJSONString(bigTaskConditions.get(i)));
logger.info("[ 自助指标当前正在执行的任务为:]: {}", JSON.toJSONString(bigTaskConditions.get(i)));
if (bigTaskConditions.get(i).getBuildPermitted().equals(Global.YES)) {
try{
condition = bigTaskConditions.remove(i); // 移除并获取第一个任务条件
......@@ -636,7 +695,8 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
connection = HiveHelper.getBigDataDownloadHiveConnection();
break;
}catch (Exception e){
log.debug("获取连接异常:", e.getMessage());
logger.info("异常:{}",e);
logger.info("[ 获取连接异常: ]: {}", e.getMessage());
e.printStackTrace();
continue;
}
......@@ -646,7 +706,8 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
bigTaskRunningMap.remove(condition.getTaskId());
} // 没有任务则忽略
}catch (Exception e){
log.debug("自助指标下载异常", e.getMessage());
logger.info("下载异常:{}",e);
logger.info("[ 自助指标下载异常 ]: {}", e.getMessage());
e.printStackTrace();
} finally {
if(connection != null){
......@@ -671,12 +732,15 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
@Override
public void run() {
Connection connection = null;
ProviderLocalTag providerLocalTag = ProviderLocalTag.tag.get();
providerLocalTag.traceId = UUID.randomUUID().toString();
try{
if (taskConditions != null && taskConditions.size() > 0) {
FlatQueryTaskCondition condition = null;
//FlatQueryTaskCondition condition = taskConditions.remove(0); // 移除并获取第一个任务条件
for (int i=0; i<taskConditions.size(); i++ ) {
log.debug("自助指标当前正在执行的任务为:", JSON.toJSONString(taskConditions.get(i)));
logger.info("[ 自助指标当前正在执行的任务为:]: {}", JSON.toJSONString(taskConditions.get(i)));
if (taskConditions.get(i).getBuildPermitted().equals(Global.YES)
&& taskConditions.get(i).getEnterpriseIds().contains("ff8080816dd0385e016ddca436d01fe1")) {
try{
......@@ -684,7 +748,7 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
condition = taskConditions.remove(i); // 移除并获取第一个任务条件
break;
}catch (Exception e){
log.debug("获取连接异常:", e.getMessage());
logger.info("[ 获取连接异常: ]: {}", e.getMessage());
e.printStackTrace();
continue;
}
......@@ -695,7 +759,8 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
takeFile(condition, connection);
} // 没有任务则忽略
}catch (Exception e){
log.debug("自助指标下载异常", e.getMessage());
logger.info("异常:{}",e);
logger.info("[ 自助指标下载异常 ]: {}", e.getMessage());
e.printStackTrace();
} finally {
if(connection != null){
......@@ -716,7 +781,10 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
DownloadTask task = DownloadTaskServiceImpl.getInstance().getDownloadTaskById(condition.getTaskId());
task.setStatus(DownloadTaskStatus.BUILDING);
DownloadTaskServiceImpl.getInstance().updateDownloadTask(task);
log.debug("runDownloadTask.run", "自助指标下载任务执行:" + task.getId());
logger.info("[ runDownloadTask.run ]: {}", "自助指标下载任务执行:" + task.getId());
//初始化校验
// FilterFieldUtils.timeFieldCheckInit(condition.getConditions());
String fullQuery = buildFlatQuerySQL(
false, // 下载用途
condition.getTableId(),
......@@ -728,8 +796,8 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
0,
condition.getAuthStoreIdList());
log.debug("runDownloadTask.run", "获取商户连接:" + task.getId());
log.debug("sql-", task.getId() + "-" + fullQuery);
logger.info("[ runDownloadTask.run ]: {}", "获取商户连接:" + task.getId());
logger.info("[ sql- ]: {}", task.getId() + "-" + fullQuery);
if (conn != null) {
try {
Statement stat = conn.createStatement();
......@@ -742,7 +810,7 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
// 生成指定格式下载元文件
String originalFilePath = "";
if (task.getFormat().equals(DownloadFileFormat.CSV)) { // 如果指定为 CSV 格式
log.debug("runDownloadTask.run", "准备生成自助指标下载文件 " + condition.getTaskId() + ".csv");
logger.info("[ runDownloadTask.run ]: {}", "准备生成自助指标下载文件 " + condition.getTaskId() + ".csv");
originalFilePath = SAVE_FOLDER + "/" + condition.getTaskId() + ".csv";
File tmp = new File(originalFilePath);
if (tmp.exists()) { // 删除可能存在的文件
......@@ -755,9 +823,9 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
writer.writeAll(rs, true);
writer.close();
out.close();//记得关闭资源
log.debug("runDownloadTask.run", "已生成自助指标下载文件 " + condition.getTaskId() + ".csv");
logger.info("[ runDownloadTask.run ]: {}", "已生成自助指标下载文件 " + condition.getTaskId() + ".csv");
} else { // 如果指定为 XLS 格式
log.debug("runDownloadTask.run", "准备生成自助指标下载文件 " + condition.getTaskId() + ".xlsx");
logger.info("[ runDownloadTask.run ]: {}", "准备生成自助指标下载文件 " + condition.getTaskId() + ".xlsx");
originalFilePath = SAVE_FOLDER + "/" + condition.getTaskId() + ".xlsx";
SXSSFWorkbook wb = new SXSSFWorkbook(100); // 内存中保留 100 行
Sheet sheet = wb.createSheet();
......@@ -784,14 +852,14 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
fileOut.close();
//wb.close();
wb.dispose(); // SXSSFWorkbook 没有 close 方法
log.debug("runDownloadTask.run", "已生成自助指标下载文件 " + condition.getTaskId() + ".xlsx");
logger.info("[ runDownloadTask.run ]: {}", "已生成自助指标下载文件 " + condition.getTaskId() + ".xlsx");
} // IF ELSE OVER
String cloudFileUrl = "https://";
// 如果指定压缩,则使用之
//if (task.getFormat().equals("zip")) {
String taskFileExt = task.getUseCompress().equals(Global.YES) ? ".zip" : task.getFormat().equals(DownloadFileFormat.CSV) ? ".csv" : ".xlsx";
if (task.getUseCompress().equals(Global.YES)) {
log.debug("runDownloadTask.run", "准备生成自助指标压缩文件 " + condition.getTaskId() + ".zip");
logger.info("[ runDownloadTask.run ]: {}", "准备生成自助指标压缩文件 " + condition.getTaskId() + ".zip");
String zipFilePath = SAVE_FOLDER + "/" + condition.getTaskId() + ".zip";
File zipFile = new File(zipFilePath);
ZipOutputStream zos = null;
......@@ -812,27 +880,28 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
is.close();
//bos.close();
//os.close();
log.debug("runDownloadTask.run", "已生成自助指标压缩文件 " + condition.getTaskId() + ".zip");
logger.info("[ runDownloadTask.run ]: {}", "已生成自助指标压缩文件 " + condition.getTaskId() + ".zip");
} catch (Exception ex2) {
throw ex2;
} finally {
zos.closeEntry();
zos.close();
}
log.debug("开始上传压缩文件到腾讯云", task.getId());
logger.info("[ 开始上传压缩文件到腾讯云 ]: {}", task.getId());
cloudFileUrl += FileUploadUtil.simpleUploadFileFromLocal(zipFile, task.getName() + "-" + task.getId()+taskFileExt, BucketNameEnum.COMPRESS_60000.getName());
} else {
log.debug("开始上传文件到腾讯云", task.getId());
logger.info("[ 开始上传文件到腾讯云 ]: {}", task.getId());
cloudFileUrl += FileUploadUtil.simpleUploadFileFromLocal(new File(originalFilePath), task.getName() + "-" + task.getId()+taskFileExt, BucketNameEnum.REPORT_50000.getName());
}
log.debug("上传腾讯云", "地址为:"+cloudFileUrl);
logger.info("[ 上传腾讯云 ]: {}", "地址为:"+cloudFileUrl);
task.setStatus(DownloadTaskStatus.COMPLISHED);
task.setOverTime(new Date());
task.setFilePath(cloudFileUrl);
DownloadTaskServiceImpl.getInstance().updateDownloadTask(task);
} catch (Exception ex) {
logger.info(task.getId()+" 下载异常异常:{}",ex);
ex.printStackTrace();
// 标记任务异常
task.setStatus(DownloadTaskStatus.ERROR);
......@@ -844,6 +913,8 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
} catch (SQLException e) {
e.printStackTrace();
}
// 移除校验
// FilterFieldUtils.removeTimeCheck();
}
} // IF OVER
......
<?xml version="1.0" encoding="UTF-8"?>
<!-- Log4j2 configuration. The status attribute controls Log4j's OWN internal
     diagnostics (not application logging); "Trace" is very verbose at startup —
     consider "warn" for production. -->
<Configuration status="Trace">
<Appenders>
<!-- Single console appender writing to stdout. -->
<Console name="Console" target="SYSTEM_OUT">
<PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
</Console>
</Appenders>
<Loggers>
<!-- NOTE(review): Root level "error" suppresses INFO output from the
     logger.info(...) calls introduced elsewhere in this change set unless a
     more specific logger overrides it — confirm this is intended. -->
<Root level="error">
<AppenderRef ref="Console"/>
</Root>
</Loggers>
</Configuration>
\ No newline at end of file
......@@ -19,6 +19,8 @@ import com.gic.enterprise.api.dto.StoreWidgetDTO;
import com.gic.enterprise.api.service.EnterpriseService;
import com.gic.enterprise.api.service.StoreWidgetService;
import com.gic.web.common.utils.SessionContextUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.util.CollectionUtils;
......@@ -39,6 +41,8 @@ import java.util.List;
@Controller
public class FlatQueryController {
private static Logger logger= LoggerFactory.getLogger(FlatQueryController.class);
/** 脱敏字段 */
public static final List<String> FILTERS_PHONE_ONLY = Arrays.asList("mobile", "phone", "enterprise_name", "phone_number", "receive_phone_number", "use_phone_number", "friend_phone_num","from_phone_num");
......@@ -145,8 +149,8 @@ public class FlatQueryController {
*/
@RequestMapping("/flat-query")
public FlatQueryResult doFlatQuery(@RequestBody FlatQueryExecuteRequest executeRequest, HttpServletRequest request, HttpServletResponse response) {
System.out.println("doFlatQuery:" + executeRequest.getTableId());
System.out.println("doFlatQuery:" + executeRequest.getQueryConditions().size());
logger.info("doFlatQuery:" + executeRequest.getTableId());
logger.info("doFlatQuery:" + executeRequest.getQueryConditions().size());
String enterpriseId = SessionContextUtils.getLoginUserEnterpriseId();
EnterpriseSettingDTO setting = this.enterpriseService.getEnterpriseSettingByEnterpriseId(enterpriseId);
int dataPermission = 0;
......@@ -154,7 +158,7 @@ public class FlatQueryController {
dataPermission = setting.getDataPermission();
}
for (FlatQueryCondition condition : executeRequest.getQueryConditions()) {
System.out.println(condition.getFieldName() + " " + condition.getFieldType());
logger.info(condition.getFieldName() + " " + condition.getFieldType());
}
AuthorizedUser loginUser = SessionContextUtils.getLoginUser();
DhSearchLogDTO dto = new DhSearchLogDTO();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment