Commit 9a40bf8f authored Apr 25, 2023 by fudahua

feat: switch to JSON format (换成json格式)

parent 73ded52f

Showing 4 changed files with 87 additions and 9 deletions
HDFSUtil.java                    ...c/main/java/com/gic/cloud/data/hook/service/HDFSUtil.java   +24 -0
DownloadTaskServiceImpl.java     ...cloud/data/hook/service/impl/DownloadTaskServiceImpl.java   +5 -0
FlatQueryResultServiceImpl.java  ...ud/data/hook/service/impl/FlatQueryResultServiceImpl.java   +5 -3
ExceTest2.java                   ...ud-data-hook-service/src/test/java/com/gic/ExceTest2.java   +53 -6
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/HDFSUtil.java
@@ -16,6 +16,8 @@ public class HDFSUtil {
    private static HDFSUtil hdfsUtil = null;
    private static FileSystem fileSystem = null;
    public static final String HDFS_URL = "/data/hook";

    public static HDFSUtil getInstance() {
        if (hdfsUtil == null) {
            synchronized (HDFSUtil.class) {
@@ -56,4 +58,26 @@ public class HDFSUtil {
            return false;
        }
    }

    public boolean deleteFile(String srcPath) {
        try {
            // recursive delete of the given HDFS path
            fileSystem.delete(new Path(srcPath), true);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return true;
    }

    public String getHdfsName(String id) {
        // download file: directory name for a task
        String dirName = "hdfs" + id;
        return dirName;
    }

    public String getHdfsPath(String id) {
        // download file: full HDFS path for a task
        String dirName = getHdfsName(id);
        String path = HDFS_URL + "/" + dirName;
        return path;
    }
}
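Taken together, the new helpers give each download task a per-task directory under /data/hook and a way to remove it recursively once the task is cleaned up. The following is a minimal sketch of the intended call pattern, not part of the commit; the taskId value and the wrapper class are illustrative, and it assumes the HDFSUtil singleton can be initialized in your environment.

// Sketch only (not in the commit): how the new HDFSUtil helpers compose.
import com.gic.cloud.data.hook.service.HDFSUtil;

public class HdfsPathSketch {
    public static void main(String[] args) {
        String taskId = "12345";                                    // hypothetical task id
        String name = HDFSUtil.getInstance().getHdfsName(taskId);   // "hdfs12345"
        String path = HDFSUtil.getInstance().getHdfsPath(taskId);   // "/data/hook/hdfs12345"
        System.out.println(name + " -> " + path);

        // Recursively deletes the whole task directory; IOExceptions are
        // swallowed inside deleteFile, which always returns true.
        HDFSUtil.getInstance().deleteFile(path);
    }
}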
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/impl/DownloadTaskServiceImpl.java
@@ -10,6 +10,7 @@ import com.gic.cloud.data.hook.api.entity.DownloadTaskStatus;
import com.gic.cloud.data.hook.api.entity.FreeQueryTaskCondition;
import com.gic.cloud.data.hook.api.entity.Global;
import com.gic.cloud.data.hook.api.service.IDownloadTaskService;
import com.gic.cloud.data.hook.service.HDFSUtil;
import com.gic.cloud.data.hook.service.MysqlHelper;
import com.gic.cloud.data.hook.service.dao.DownloadRecordDao;
import com.gic.cloud.data.hook.service.dao.DownloadTaskDao;
@@ -165,6 +166,10 @@ public class DownloadTaskServiceImpl implements IDownloadTaskService {
            }
            this.downloadTaskDao.deleteDownloadTask(task.getId());
            CloudFileUtil.delFileByUrl(task.getFilePath());
            String hdfsPath = HDFSUtil.getInstance().getHdfsPath(task.getId());
            HDFSUtil.getInstance().deleteFile(hdfsPath);
            return true;
        } else return false;
    }
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/impl/FlatQueryResultServiceImpl.java
@@ -73,7 +73,7 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
     public static final String SAVE_FOLDER = "/usr/local/data-hook-file";
     // public static final String SAVE_FOLDER = "D:\\testorder";
-    public static final String HDFS_URL = "/data/hook";
+    // public static final String HDFS_URL = "/data/hook";
     public static final String LOCK_KEY = "data:hook:hive";
@@ -919,8 +919,10 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
             task.setDownloadWay(-1);
             task.setFieldSize(condition.getAllFields().size());
             // download file
-            String dirName = "hdfs" + task.getId();
-            String path = HDFS_URL + "/" + dirName;
+            String dirName = HDFSUtil.getInstance().getHdfsName(task.getId());
+            String path = HDFSUtil.getInstance().getHdfsPath(task.getId());
+            HDFSUtil.getInstance().getHdfsName(task.getId());
             try {
                 StopWatch stopWatch = StopWatch.create("down");
                 stopWatch.start();
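The swap is meant to be behavior-preserving: getHdfsName and getHdfsPath reproduce the strings the old inline concatenation built. A hypothetical sanity check, not part of the commit and assuming HDFSUtil.getInstance() can be constructed outside a live HDFS environment, could look like this.

// Hypothetical JUnit check (not in the commit): the new helpers should yield
// exactly the strings the old concatenation produced.
import com.gic.cloud.data.hook.service.HDFSUtil;
import org.junit.Assert;
import org.junit.Test;

public class HdfsPathEquivalenceTest {
    @Test
    public void helpersMatchOldConcatenation() {
        String id = "task-001";                       // illustrative task id
        String oldDirName = "hdfs" + id;              // old: "hdfs" + task.getId()
        String oldPath = "/data/hook/" + oldDirName;  // old: HDFS_URL + "/" + dirName
        Assert.assertEquals(oldDirName, HDFSUtil.getInstance().getHdfsName(id));
        Assert.assertEquals(oldPath, HDFSUtil.getInstance().getHdfsPath(id));
    }
}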
gic-cloud-data-hook-service/src/test/java/com/gic/ExceTest2.java
@@ -8,12 +8,8 @@ import com.gic.cloud.data.hook.api.entity.FlatQueryTaskCondition;
import com.gic.cloud.data.hook.service.DecryptUtils;
import com.gic.cloud.data.hook.service.DownloadFunc;
import com.gic.cloud.data.hook.service.FileUtil;
import com.gic.cloud.data.hook.service.entity.ColumnInfo;
import com.gic.cloud.data.hook.service.impl.FlatQueryResultServiceImpl;
import com.opencsv.CSVReader;
import com.opencsv.CSVReaderBuilder;
import com.opencsv.RFC4180Parser;
import com.opencsv.RFC4180ParserBuilder;
import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.ss.usermodel.*;
@@ -21,6 +17,7 @@ import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
@@ -51,7 +48,6 @@ public class ExceTest2 {
        String lastcells = null;
        String[] cells = null;
        try {
            RFC4180Parser rfc4180Parser = new RFC4180ParserBuilder().build();
            CSVReader csvReader = new CSVReaderBuilder(new FileReader(midFile)).build();
            cells = csvReader.readNext();
            boolean first = true;
@@ -81,6 +77,57 @@ public class ExceTest2 {
        }
    }

    /**
     * Read the JSON files of a task directory
     * @param condition
     * @param dirName
     * @param func
     */
    private static void readJsonFile(FlatQueryTaskCondition condition, String dirName, DownloadFunc func) {
        File file = new File(SAVE_FOLDER + "/" + dirName);
        File[] files = file.listFiles();
        List<FlatQueryCondition> titles = null;
        List<FlatQueryCondition> conditions = condition.getConditions();
        List<String> keys = conditions.stream().map(mid -> mid.getFieldMark()).collect(Collectors.toList());
        Map<String, FlatQueryCondition> columnInfoMap =
                conditions.stream().collect(Collectors.toMap(mid -> mid.getFieldMark(), mid -> mid));
        List<File> fileList = Arrays.stream(files).sorted(Comparator.comparing(File::getName)).collect(Collectors.toList());
        for (File midFile : fileList) {
            if (!midFile.getName().endsWith("json")) {
                continue;
            }
            try {
                BufferedReader reader = new BufferedReader(new FileReader(midFile));
                boolean first = true;
                Exception exception = null;
                try {
                    String line = reader.readLine();
                    while (line != null) {
                        List<String> cellList = new ArrayList<>();
                        JSONObject jsonObject = JSONObject.parseObject(line);
                        for (String key : keys) {
                            String cellVal = jsonObject.getString(key);
                            cellList.add(cellVal);
                        }
                        String[] cells = cellList.toArray(new String[]{});
                        func.deal(cells, conditions, first);
                        first = false;
                        line = reader.readLine();
                    }
                    logger.info("finished reading: {}", midFile.getName());
                } catch (Exception e) {
                    exception = e;
                } finally {
                    reader.close();
                }
                if (exception != null) {
                    throw exception;
                }
            } catch (Exception e) {
                logger.info("read error: {}", e);
                throw new RuntimeException(e);
            }
        }
    }
    public static class XlsxFileInfo {
        String filepath;
        SXSSFWorkbook workbook;
@@ -188,7 +235,7 @@ public class ExceTest2 {
         List<String> xlsxFiles = new ArrayList<>();
         AtomicInteger count = new AtomicInteger(0);
         AtomicReference<XlsxFileInfo> currentFile = new AtomicReference<>();
-        readCsvFile(condition, dirName, (cells, titles, firstFlag) -> {
+        readJsonFile(condition, dirName, (cells, titles, firstFlag) -> {
             if (count.get() == 0) {
                 XlsxFileInfo xlsxFileInfo = new XlsxFileInfo();
                 xlsxFileInfo.filepath = SAVE_FOLDER + "/" + taskId + xlsxFiles.size() + ".xlsx";
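The data files the test now consumes are JSON Lines: one JSON object per line, keyed by the field marks taken from the task's conditions, which readJsonFile turns into a String[] per row. Below is a small illustrative sketch of that per-line shape and parse step; the file contents, field marks, and values are made up, and the fastjson import is an assumption since the commit does not show which JSONObject is imported.

// Sketch only (not in the commit): the per-line JSON shape readJsonFile expects,
// parsed the same way (JSONObject.parseObject + getString per field mark).
import com.alibaba.fastjson.JSONObject;   // assumed JSONObject implementation
import java.util.Arrays;
import java.util.List;

public class JsonLineSketch {
    public static void main(String[] args) {
        // one line of a hypothetical .json data file; keys are field marks
        String line = "{\"member_id\":\"m001\",\"order_no\":\"20230425001\",\"amount\":\"99.90\"}";
        List<String> keys = Arrays.asList("member_id", "order_no", "amount"); // hypothetical field marks

        JSONObject jsonObject = JSONObject.parseObject(line);
        String[] cells = keys.stream().map(jsonObject::getString).toArray(String[]::new);
        System.out.println(Arrays.toString(cells)); // [m001, 20230425001, 99.90]
    }
}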