Commit 9a40bf8f by fudahua

feat: 换成json格式

parent 73ded52f
......@@ -16,6 +16,8 @@ public class HDFSUtil {
private static HDFSUtil hdfsUtil=null;
private static FileSystem fileSystem=null;
public static final String HDFS_URL = "/data/hook";
public static HDFSUtil getInstance(){
if (hdfsUtil==null) {
synchronized (HDFSUtil.class) {
......@@ -56,4 +58,26 @@ public class HDFSUtil {
return false;
}
}
/**
 * Recursively deletes the given path from HDFS.
 *
 * @param srcPath absolute HDFS path to remove (directories are removed recursively)
 * @return true only if HDFS reports the delete succeeded; false when the path
 *         could not be deleted or an I/O error occurred
 */
public boolean deleteFile(String srcPath){
    try {
        // Propagate HDFS's own success flag instead of unconditionally returning true.
        return fileSystem.delete(new Path(srcPath), true);
    } catch (IOException e) {
        // NOTE(review): file kept the printStackTrace style; a logger would be preferable.
        e.printStackTrace();
        return false;
    }
}
/**
 * Builds the HDFS directory name used for a download task.
 *
 * @param id task identifier
 * @return the directory name, i.e. {@code "hdfs" + id}
 */
public String getHdfsName(String id){
    return "hdfs" + id;
}
/**
 * Builds the full HDFS path for a download task.
 *
 * @param id task identifier
 * @return {@code HDFS_URL + "/" + getHdfsName(id)}
 */
public String getHdfsPath(String id){
    // Delegate to getHdfsName so the naming convention lives in one place.
    return HDFS_URL + "/" + getHdfsName(id);
}
}
......@@ -10,6 +10,7 @@ import com.gic.cloud.data.hook.api.entity.DownloadTaskStatus;
import com.gic.cloud.data.hook.api.entity.FreeQueryTaskCondition;
import com.gic.cloud.data.hook.api.entity.Global;
import com.gic.cloud.data.hook.api.service.IDownloadTaskService;
import com.gic.cloud.data.hook.service.HDFSUtil;
import com.gic.cloud.data.hook.service.MysqlHelper;
import com.gic.cloud.data.hook.service.dao.DownloadRecordDao;
import com.gic.cloud.data.hook.service.dao.DownloadTaskDao;
......@@ -165,6 +166,10 @@ public class DownloadTaskServiceImpl implements IDownloadTaskService {
}
this.downloadTaskDao.deleteDownloadTask(task.getId());
CloudFileUtil.delFileByUrl(task.getFilePath());
String hdfsPath = HDFSUtil.getInstance().getHdfsPath(task.getId());
HDFSUtil.getInstance().deleteFile(hdfsPath);
return true;
} else return false;
}
......
......@@ -73,7 +73,7 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
public static final String SAVE_FOLDER = "/usr/local/data-hook-file";
// public static final String SAVE_FOLDER = "D:\\testorder";
public static final String HDFS_URL = "/data/hook";
// public static final String HDFS_URL = "/data/hook";
public static final String LOCK_KEY="data:hook:hive";
......@@ -919,8 +919,10 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
task.setDownloadWay(-1);
task.setFieldSize(condition.getAllFields().size());
//下载文件
String dirName="hdfs"+task.getId();
String path=HDFS_URL+"/"+dirName;
String dirName=HDFSUtil.getInstance().getHdfsName(task.getId());
String path=HDFSUtil.getInstance().getHdfsPath(task.getId());
HDFSUtil.getInstance().getHdfsName(task.getId());
try {
StopWatch stopWatch = StopWatch.create("down");
stopWatch.start();
......
......@@ -8,12 +8,8 @@ import com.gic.cloud.data.hook.api.entity.FlatQueryTaskCondition;
import com.gic.cloud.data.hook.service.DecryptUtils;
import com.gic.cloud.data.hook.service.DownloadFunc;
import com.gic.cloud.data.hook.service.FileUtil;
import com.gic.cloud.data.hook.service.entity.ColumnInfo;
import com.gic.cloud.data.hook.service.impl.FlatQueryResultServiceImpl;
import com.opencsv.CSVReader;
import com.opencsv.CSVReaderBuilder;
import com.opencsv.RFC4180Parser;
import com.opencsv.RFC4180ParserBuilder;
import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.ss.usermodel.*;
......@@ -21,6 +17,7 @@ import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
......@@ -51,7 +48,6 @@ public class ExceTest2 {
String lastcells=null;
String[] cells=null;
try {
RFC4180Parser rfc4180Parser = new RFC4180ParserBuilder().build();
CSVReader csvReader = new CSVReaderBuilder(new FileReader(midFile)).build();
cells = csvReader.readNext();
boolean first=true;
......@@ -81,6 +77,57 @@ public class ExceTest2 {
}
}
/**
 * Reads every {@code *.json} file under {@code SAVE_FOLDER/dirName} (sorted by file
 * name) and feeds each line, parsed as a JSON object, to the callback as a row of
 * string cells ordered by the condition's field marks.
 *
 * @param condition query condition whose {@code getConditions()} list fixes the
 *                  column order of every emitted row
 * @param dirName   sub-directory under {@code SAVE_FOLDER} holding the JSON-lines files
 * @param func      callback invoked once per line with (cells, conditions, firstFlag);
 *                  firstFlag is true only for the first line of each file
 * @throws RuntimeException if the directory cannot be listed or a file fails to read/parse
 */
private static void readJsonFile(FlatQueryTaskCondition condition, String dirName, DownloadFunc func){
    File dir = new File(SAVE_FOLDER + "/" + dirName);
    File[] files = dir.listFiles();
    if (files == null) {
        // listFiles() returns null for a missing/unreadable directory; fail with a
        // clear message instead of an NPE further down.
        throw new RuntimeException("cannot list directory: " + dir.getPath());
    }
    List<FlatQueryCondition> conditions = condition.getConditions();
    // Column order is fixed by the condition list; each JSON line is projected onto it.
    List<String> keys = conditions.stream()
            .map(FlatQueryCondition::getFieldMark)
            .collect(Collectors.toList());
    // Process part-files in deterministic (name) order so row order is stable.
    List<File> fileList = Arrays.stream(files)
            .sorted(Comparator.comparing(File::getName))
            .collect(Collectors.toList());
    for (File midFile : fileList) {
        if (!midFile.getName().endsWith("json")) {
            continue;
        }
        // firstFlag resets per file, matching the original per-file header handling.
        boolean first = true;
        // try-with-resources replaces the manual close()/rethrow bookkeeping.
        // NOTE(review): FileReader uses the platform default charset — confirm the
        // files are written with the same charset (UTF-8 would be safer via
        // Files.newBufferedReader).
        try (BufferedReader reader = new BufferedReader(new FileReader(midFile))) {
            String line = reader.readLine();
            while (line != null) {
                JSONObject jsonObject = JSONObject.parseObject(line);
                List<String> cellList = new ArrayList<>(keys.size());
                for (String key : keys) {
                    cellList.add(jsonObject.getString(key));
                }
                func.deal(cellList.toArray(new String[0]), conditions, first);
                first = false;
                line = reader.readLine();
            }
            logger.info("读取结束:{}", midFile.getName());
        } catch (Exception e) {
            // Log at error level (was info) before propagating.
            logger.error("读取异常:{}", e);
            throw new RuntimeException(e);
        }
    }
}
public static class XlsxFileInfo{
String filepath;
SXSSFWorkbook workbook;
......@@ -188,7 +235,7 @@ public class ExceTest2 {
List<String> xlsxFiles=new ArrayList<>();
AtomicInteger count=new AtomicInteger(0);
AtomicReference<XlsxFileInfo> currentFile=new AtomicReference<>();
readCsvFile(condition,dirName,(cells,titles,firstFlag)->{
readJsonFile(condition,dirName,(cells,titles,firstFlag)->{
if (count.get()==0) {
XlsxFileInfo xlsxFileInfo = new XlsxFileInfo();
xlsxFileInfo.filepath=SAVE_FOLDER + "/" + taskId + xlsxFiles.size() + ".xlsx";
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment