Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
G
gic-cloud
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
data-hook
gic-cloud
Commits
9470ea1d
Commit
9470ea1d
authored
Nov 18, 2021
by
陶光胜
Browse files
Options
Browse Files
Download
Plain Diff
Merge branch 'developer' into 'master'
Developer See merge request
!29
parents
9e548db7
686a8ed2
Hide whitespace changes
Inline
Side-by-side
Showing
16 changed files
with
807 additions
and
404 deletions
+807
-404
DhDecryptKey.java
...in/java/com/gic/cloud/data/hook/api/dto/DhDecryptKey.java
+100
-0
DecryptKeyService.java
...om/gic/cloud/data/hook/api/service/DecryptKeyService.java
+9
-0
IFreeQueryService.java
...om/gic/cloud/data/hook/api/service/IFreeQueryService.java
+6
-0
pom.xml
gic-cloud-data-hook-service/pom.xml
+50
-0
DecryptUtils.java
...in/java/com/gic/cloud/data/hook/service/DecryptUtils.java
+1
-1
FilterFieldUtils.java
...ava/com/gic/cloud/data/hook/service/FilterFieldUtils.java
+61
-0
TestHive.java
...c/main/java/com/gic/cloud/data/hook/service/TestHive.java
+20
-22
DhDecryptKeyMapper.java
...m/gic/cloud/data/hook/service/dao/DhDecryptKeyMapper.java
+58
-0
CsvResultSetHelper.java
...ic/cloud/data/hook/service/entity/CsvResultSetHelper.java
+114
-25
DecryptKeyServiceImpl.java
...c/cloud/data/hook/service/impl/DecryptKeyServiceImpl.java
+36
-0
FlatQueryResultServiceImpl.java
...ud/data/hook/service/impl/FlatQueryResultServiceImpl.java
+153
-325
FreeQueryServiceImpl.java
...ic/cloud/data/hook/service/impl/FreeQueryServiceImpl.java
+37
-20
DhDecryptKeyMapper.xml
...-service/src/main/resources/mapper/DhDecryptKeyMapper.xml
+125
-0
FlatQueryTableDao.xml
...k-service/src/main/resources/mapper/FlatQueryTableDao.xml
+5
-4
FlatQueryController.java
...java/com/gic/cloud/data/hook/web/FlatQueryController.java
+5
-6
FreeQueryController.java
...java/com/gic/cloud/data/hook/web/FreeQueryController.java
+27
-1
No files found.
gic-cloud-data-hook-api/src/main/java/com/gic/cloud/data/hook/api/dto/DhDecryptKey.java
0 → 100644
View file @
9470ea1d
package
com
.
gic
.
cloud
.
data
.
hook
.
api
.
dto
;
import
java.util.Date
;
/**
 * Data-transfer object mirroring the {@code dh_decrypt_key} table: an RSA
 * key pair stored per enterprise, together with its lifecycle timestamps.
 */
public class DhDecryptKey {

    // Primary key of the dh_decrypt_key row.
    private Integer id;

    // Enterprise the key pair belongs to.
    private String enterpriseId;

    // Base64-encoded RSA public key.
    private String publicKey;

    // Base64-encoded RSA private key.
    private String privateKey;

    // When the row was created.
    private Date createTime;

    // When the key pair stops being valid.
    private Date expireTime;

    // When the row was last modified.
    private Date updateTime;

    public Integer getId() {
        return this.id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getEnterpriseId() {
        return this.enterpriseId;
    }

    public void setEnterpriseId(String enterpriseId) {
        this.enterpriseId = enterpriseId;
    }

    public String getPublicKey() {
        return this.publicKey;
    }

    public void setPublicKey(String publicKey) {
        this.publicKey = publicKey;
    }

    public String getPrivateKey() {
        return this.privateKey;
    }

    public void setPrivateKey(String privateKey) {
        this.privateKey = privateKey;
    }

    public Date getCreateTime() {
        return this.createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Date getExpireTime() {
        return this.expireTime;
    }

    public void setExpireTime(Date expireTime) {
        this.expireTime = expireTime;
    }

    public Date getUpdateTime() {
        return this.updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }
}
\ No newline at end of file
gic-cloud-data-hook-api/src/main/java/com/gic/cloud/data/hook/api/service/DecryptKeyService.java
0 → 100644
View file @
9470ea1d
package
com
.
gic
.
cloud
.
data
.
hook
.
api
.
service
;
import
com.gic.cloud.data.hook.api.dto.DhDecryptKey
;
/**
 * Contract for managing the per-enterprise RSA key pairs used to
 * re-encrypt exported data.
 */
public interface DecryptKeyService {

    /**
     * Persists a key pair for the given enterprise.
     * (Method name keeps its historical spelling for caller compatibility.)
     *
     * @param enterpriseId enterprise the keys belong to
     * @param publicKey    Base64-encoded RSA public key
     * @param privateKey   Base64-encoded RSA private key
     */
    void saveDecryKey(String enterpriseId, String publicKey, String privateKey);

    /**
     * Looks up the stored key pair for an enterprise.
     *
     * @param enterpriseId enterprise to look up
     * @return the stored key record, or {@code null} if none exists
     */
    DhDecryptKey getKeyByEnterpriseId(String enterpriseId);
}
gic-cloud-data-hook-api/src/main/java/com/gic/cloud/data/hook/api/service/IFreeQueryService.java
View file @
9470ea1d
...
...
@@ -79,4 +79,10 @@ public interface IFreeQueryService {
FreeQuerySource
getFreeQuerySource
(
String
enterpriseId
);
void
initTask
();
List
<
String
>
listFilterPhoneField
();
List
<
String
>
listFilterPhoneAndCardNoField
();
List
<
String
>
listFilterUserName
();
}
gic-cloud-data-hook-service/pom.xml
View file @
9470ea1d
...
...
@@ -39,16 +39,34 @@
<groupId>
org.apache.poi
</groupId>
<artifactId>
poi
</artifactId>
<version>
3.10-FINAL
</version>
<exclusions>
<exclusion>
<artifactId>
commons-codec
</artifactId>
<groupId>
commons-codec
</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>
com.gic
</groupId>
<artifactId>
gic-cloud-data-hook-api
</artifactId>
<version>
${gic-cloud-data-hook-api}
</version>
<exclusions>
<exclusion>
<artifactId>
httpclient
</artifactId>
<groupId>
org.apache.httpcomponents
</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>
com.gic
</groupId>
<artifactId>
data-shield-sdk
</artifactId>
<version>
1.0-SNAPSHOT
</version>
<exclusions>
<exclusion>
<artifactId>
httpclient
</artifactId>
<groupId>
org.apache.httpcomponents
</groupId>
</exclusion>
</exclusions>
</dependency>
<!--<dependency>
<groupId>data-shield</groupId>
...
...
@@ -92,6 +110,14 @@
<groupId>
log4j
</groupId>
<artifactId>
log4j
</artifactId>
</exclusion>
<exclusion>
<artifactId>
httpclient
</artifactId>
<groupId>
org.apache.httpcomponents
</groupId>
</exclusion>
<exclusion>
<artifactId>
commons-codec
</artifactId>
<groupId>
commons-codec
</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
...
...
@@ -107,6 +133,30 @@
<groupId>
log4j
</groupId>
<artifactId>
log4j
</artifactId>
</exclusion>
<exclusion>
<artifactId>
httpclient
</artifactId>
<groupId>
org.apache.httpcomponents
</groupId>
</exclusion>
<exclusion>
<artifactId>
commons-codec
</artifactId>
<groupId>
commons-codec
</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>
cn.hutool
</groupId>
<artifactId>
hutool-all
</artifactId>
<version>
5.5.8
</version>
</dependency>
<dependency>
<groupId>
com.gic
</groupId>
<artifactId>
gic-thirdparty-sdk
</artifactId>
<version>
${gic-thirdparty-sdk}
</version>
<exclusions>
<exclusion>
<artifactId>
commons-codec
</artifactId>
<groupId>
commons-codec
</groupId>
</exclusion>
</exclusions>
</dependency>
...
...
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/DecryptUtils.java
View file @
9470ea1d
...
...
@@ -60,7 +60,7 @@ public class DecryptUtils {
}
for
(
FlatQueryCondition
condition
:
allFields
){
if
(
condition
.
getFieldMark
().
equals
(
columnName
)){
if
(
F
reeQueryServiceImpl
.
FILETERS_USER_NAME
.
contains
(
condition
.
getFieldName
())){
if
(
F
ilterFieldUtils
.
FILETERS_USER_NAME
.
contains
(
condition
.
getFieldName
())){
return
true
;
}
}
...
...
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/FilterFieldUtils.java
0 → 100644
View file @
9470ea1d
package
com
.
gic
.
cloud
.
data
.
hook
.
service
;
import
com.ctrip.framework.apollo.Config
;
import
com.ctrip.framework.apollo.ConfigService
;
import
org.apache.commons.lang3.StringUtils
;
import
org.apache.commons.lang3.concurrent.BasicThreadFactory
;
import
org.springframework.stereotype.Component
;
import
javax.annotation.PostConstruct
;
import
java.util.ArrayList
;
import
java.util.Arrays
;
import
java.util.List
;
import
java.util.concurrent.ScheduledExecutorService
;
import
java.util.concurrent.ScheduledThreadPoolExecutor
;
import
java.util.concurrent.TimeUnit
;
@Component
public class FilterFieldUtils {

    /** Fields masked when only phone numbers are desensitized. */
    public static List<String> FILTERS_PHONE_ONLY = Arrays.asList(
            "mobile", "phone", "enterprise_name", "phone_number",
            "receive_phone_number", "use_phone_number",
            "friend_phone_num", "from_phone_num");

    /** Fields masked when both phone numbers and card numbers are desensitized. */
    public static List<String> FILTERS_PHONE_AND_CARD = Arrays.asList(
            "card_num", "mobile", "phone", "enterprise_name", "phone_number",
            "receive_phone_number", "receive_card_num", "use_phone_number",
            "use_card_num", "friend_card_num", "from_card_num",
            "friend_phone_num", "from_phone_num");

    /** Fields treated as user names (misspelled constant name kept: external callers reference it). */
    public static List<String> FILETERS_USER_NAME = Arrays.asList(
            "member_name", "children_name", "mbr_name", "receive_member_name",
            "use_member_name", "name", "bb_name",
            "friend_mbr_name", "from_mbr_name");

    // Single daemon thread that periodically refreshes the filter lists from Apollo.
    ScheduledExecutorService timer = new ScheduledThreadPoolExecutor(1,
            new BasicThreadFactory.Builder()
                    .namingPattern("loadFilterFieldTimer-%d")
                    .daemon(true)
                    .build());

    @PostConstruct
    public void init() {
        initTimer();
    }

    /**
     * Every 5 seconds, reloads the filter-field lists from the Apollo
     * "application" namespace (comma-separated properties
     * {@code phoneNumberField}, {@code cardNumField}, {@code userNameField}).
     *
     * Bug fix vs. the original: the combined phone+card list is now rebuilt
     * into a fresh ArrayList and published in one assignment. The original
     * called {@code add(...)} directly on {@code FILTERS_PHONE_AND_CARD},
     * which is an {@code Arrays.asList} fixed-size list whenever
     * {@code phoneNumberField} is blank, so configuring only
     * {@code cardNumField} threw {@link UnsupportedOperationException}.
     * Publishing a new list also avoids readers observing a half-built list
     * and prevents duplicate card fields from accumulating across runs.
     */
    private void initTimer() {
        timer.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                Config config = ConfigService.getConfig("application");
                String phoneNumberField = config.getProperty("phoneNumberField", "");
                String cardNumField = config.getProperty("cardNumField", "");
                String userNameField = config.getProperty("userNameField", "");

                if (StringUtils.isNotBlank(phoneNumberField)) {
                    FILTERS_PHONE_ONLY = Arrays.asList(phoneNumberField.split(","));
                    // Reset the combined list to the configured phone fields.
                    FILTERS_PHONE_AND_CARD =
                            new ArrayList<>(Arrays.asList(phoneNumberField.split(",")));
                }
                if (StringUtils.isNotBlank(cardNumField)) {
                    // Copy-then-publish: never mutate the currently published list.
                    List<String> withCards = new ArrayList<>(FILTERS_PHONE_AND_CARD);
                    for (String s : cardNumField.split(",")) {
                        if (!withCards.contains(s)) { // guard against re-appending every run
                            withCards.add(s);
                        }
                    }
                    FILTERS_PHONE_AND_CARD = withCards;
                }
                if (StringUtils.isNotBlank(userNameField)) {
                    FILETERS_USER_NAME = Arrays.asList(userNameField.split(","));
                }
            }
        }, 5, 5, TimeUnit.SECONDS);
    }
}
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/TestHive.java
View file @
9470ea1d
...
...
@@ -2,8 +2,11 @@ package com.gic.cloud.data.hook.service;
import
com.gic.cloud.data.hook.api.service.IFlatQueryTableService
;
import
com.gic.qcloud.BucketNameEnum
;
import
com.gic.qcloud.FileUploadUtil
;
import
org.slf4j.LoggerFactory
;
import
java.io.File
;
import
java.sql.Connection
;
import
java.sql.DriverManager
;
import
java.sql.ResultSet
;
...
...
@@ -16,29 +19,24 @@ public class TestHive {
private
static
org
.
slf4j
.
Logger
logger
=
LoggerFactory
.
getLogger
(
IFlatQueryTableService
.
class
);
public
static
void
main
(
String
[]
args
)
{
// String url = "jdbc:hive2://115.159.205.44:10015/data_test";
// try {
// System.out.println("准备连接");
// Class.forName("org.apache.hive.jdbc.HiveDriver");
// Connection conn = DriverManager.getConnection(url, "hadoop", "");
// System.out.println("连接成功");
// String sql = "show tables";
// System.out.println("SQL: "+sql);
// Statement stmt = conn.createStatement();
// ResultSet rs = stmt.executeQuery(sql);
// while(rs.next()){
// System.out.println(rs.getString(1));
// }
//
// System.out.println("处理结束");
// } catch (Exception e) {
// e.printStackTrace();
// }
try
{
Class
.
forName
(
"org.apache.logging.log4j.core.LoggerContext"
);
}
catch
(
ClassNotFoundException
e
)
{
e
.
printStackTrace
();
String
sql
=
"select from tab_gic_store limit 10000"
;
String
curLimit
=
""
;
if
(
sql
.
indexOf
(
"limit"
)>
0
||
sql
.
indexOf
(
"LIMIT"
)>
0
){
int
start
=
0
;
if
(
sql
.
indexOf
(
"limit"
)>
0
){
start
=
sql
.
indexOf
(
"limit"
)
+
6
;
}
else
{
start
=
sql
.
indexOf
(
"LIMIT"
)
+
6
;
}
curLimit
=
sql
.
substring
(
start
);
if
(
curLimit
.
indexOf
(
","
)
>
0
){
curLimit
=
curLimit
.
substring
(
curLimit
.
indexOf
(
","
)+
1
);
}
if
(
Integer
.
valueOf
(
curLimit
)
>
1000
){
curLimit
=
1000
+
""
;
}
}
System
.
out
.
println
(
"sql = "
+
sql
+
" limit "
+
curLimit
);
}
...
...
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/dao/DhDecryptKeyMapper.java
0 → 100644
View file @
9470ea1d
package
com
.
gic
.
cloud
.
data
.
hook
.
service
.
dao
;
import
com.gic.cloud.data.hook.api.dto.DhDecryptKey
;
import
org.apache.ibatis.annotations.Param
;
/**
 * MyBatis mapper for the {@code dh_decrypt_key} table.
 */
public interface DhDecryptKeyMapper {

    /**
     * Deletes a record by primary key.
     *
     * @param id primary key
     * @return number of rows affected
     */
    int deleteByPrimaryKey(Integer id);

    /**
     * Inserts a full record.
     *
     * @param record entity to insert
     * @return number of rows affected
     */
    int insert(DhDecryptKey record);

    /**
     * Inserts a record, writing only the non-null fields.
     *
     * @param record entity to insert
     * @return number of rows affected
     */
    int insertSelective(DhDecryptKey record);

    /**
     * Loads a record by primary key.
     *
     * @param id primary key
     * @return the matching entity, or {@code null} if none
     */
    DhDecryptKey selectByPrimaryKey(Integer id);

    /**
     * Updates the non-null fields of a record, matched by primary key.
     *
     * @param record entity carrying the changes
     * @return number of rows affected
     */
    int updateByPrimaryKeySelective(DhDecryptKey record);

    /**
     * Updates all fields of a record, matched by primary key.
     *
     * @param record entity carrying the changes
     * @return number of rows affected
     */
    int updateByPrimaryKey(DhDecryptKey record);

    /**
     * Loads the key record belonging to an enterprise.
     *
     * @param enterpriseId enterprise to look up
     * @return the matching entity, or {@code null} if none
     */
    DhDecryptKey selectByEnterpriseId(@Param("enterpriseId") String enterpriseId);
}
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/entity/CsvResultSetHelper.java
View file @
9470ea1d
package
com
.
gic
.
cloud
.
data
.
hook
.
service
.
entity
;
import
cn.hutool.core.codec.Base64
;
import
cn.hutool.core.util.CharsetUtil
;
import
cn.hutool.core.util.StrUtil
;
import
cn.hutool.crypto.SecureUtil
;
import
cn.hutool.crypto.asymmetric.KeyType
;
import
cn.hutool.crypto.asymmetric.RSA
;
import
com.alibaba.dubbo.common.utils.StringUtils
;
import
com.alibaba.fastjson.JSON
;
import
com.ctrip.framework.apollo.Config
;
import
com.ctrip.framework.apollo.ConfigService
;
import
com.gic.cloud.data.hook.api.dto.DhDecryptKey
;
import
com.gic.cloud.data.hook.api.entity.FlatQueryCondition
;
import
com.gic.cloud.data.hook.api.service.DecryptKeyService
;
import
com.gic.cloud.data.hook.service.DecryptUtils
;
import
com.gic.cloud.data.hook.service.
impl.FreeQueryServiceImpl
;
import
com.gic.cloud.data.hook.service.
FilterFieldUtils
;
import
com.opencsv.ResultSetHelper
;
import
org.apache.commons.collections.CollectionUtils
;
import
java.io.IOException
;
import
java.io.UnsupportedEncodingException
;
import
java.security.KeyPair
;
import
java.sql.ResultSet
;
import
java.sql.ResultSetMetaData
;
import
java.sql.SQLException
;
import
java.sql.Types
;
import
java.text.SimpleDateFormat
;
import
java.util.ArrayList
;
import
java.util.Arrays
;
import
java.util.HashMap
;
import
java.util.List
;
...
...
@@ -25,25 +40,60 @@ public class CsvResultSetHelper implements ResultSetHelper {
private
List
<
String
>
filters
=
null
;
private
List
<
FlatQueryCondition
>
allFields
=
null
;
private
String
enterpriseId
;
private
List
<
String
>
decryptEnterpriseIdList
=
new
ArrayList
<>();
private
boolean
needDecryptByEnt
=
false
;
private
DecryptKeyService
decryptKeyService
;
private
String
publicKey
;
private
RSA
rsa
;
public
CsvResultSetHelper
(
String
filterMode
,
List
<
String
>
filters
,
List
<
FlatQueryCondition
>
allFields
)
{
public
CsvResultSetHelper
(
String
filterMode
,
List
<
String
>
filters
,
List
<
FlatQueryCondition
>
allFields
,
String
enterpriseId
,
DecryptKeyService
decryptKeyService
)
{
this
.
filterMode
=
filterMode
;
this
.
filters
=
filters
;
this
.
allFields
=
allFields
;
this
.
enterpriseId
=
enterpriseId
;
this
.
decryptKeyService
=
decryptKeyService
;
Config
config
=
ConfigService
.
getConfig
(
"application"
);
String
decryptEnterpriseIds
=
config
.
getProperty
(
"decryptEnterpriseIds"
,
""
);
if
(
org
.
apache
.
commons
.
lang
.
StringUtils
.
isNotBlank
(
decryptEnterpriseIds
)
&&
allFields
!=
null
){
decryptEnterpriseIdList
=
Arrays
.
asList
(
decryptEnterpriseIds
.
split
(
" "
));
if
(
decryptEnterpriseIdList
.
contains
(
enterpriseId
)){
needDecryptByEnt
=
true
;
//创建密钥
DhDecryptKey
key
=
decryptKeyService
.
getKeyByEnterpriseId
(
enterpriseId
);
rsa
=
new
RSA
();
if
(
key
==
null
){
decryptKeyService
.
saveDecryKey
(
enterpriseId
,
rsa
.
getPublicKeyBase64
(),
rsa
.
getPrivateKeyBase64
());
publicKey
=
rsa
.
getPublicKeyBase64
();
}
else
{
publicKey
=
key
.
getPublicKey
();
rsa
=
new
RSA
(
key
.
getPrivateKey
(),
key
.
getPublicKey
());
}
}
}
}
@Override
public
String
[]
getColumnNames
(
ResultSet
resultSet
)
throws
SQLException
{
String
[]
result
=
new
String
[
0
]
;
List
<
String
>
list
=
new
ArrayList
<>()
;
int
columnCount
=
resultSet
.
getMetaData
().
getColumnCount
();
if
(
columnCount
>
0
)
{
result
=
new
String
[
columnCount
];
for
(
int
i
=
0
;
i
<
columnCount
;
i
++)
{
String
rsColumnName
=
resultSet
.
getMetaData
().
getColumnLabel
(
i
+
1
);
result
[
i
]
=
rsColumnName
;
}
// FOR OVER
}
// IF OVER
return
result
;
list
.
add
(
rsColumnName
);
}
}
if
(
needDecryptByEnt
){
//补齐加密字段
for
(
FlatQueryCondition
condition
:
allFields
){
if
(
FilterFieldUtils
.
FILTERS_PHONE_ONLY
.
contains
(
condition
.
getFieldName
())){
list
.
add
(
condition
.
getFieldMark
());
}
}
}
return
list
.
toArray
(
new
String
[
0
]);
}
@Override
...
...
@@ -65,12 +115,12 @@ public class CsvResultSetHelper implements ResultSetHelper {
@Override
public
String
[]
getColumnValues
(
ResultSet
resultSet
,
boolean
b
,
String
s
,
String
s1
)
throws
SQLException
,
IOException
{
String
[]
result
=
new
String
[
0
];
List
<
String
>
result
=
new
ArrayList
<>();
List
<
String
>
decyptValue
=
new
ArrayList
<>();
int
columnCount
=
resultSet
.
getMetaData
().
getColumnCount
();
if
(
columnCount
>
0
)
{
result
=
new
String
[
columnCount
];
for
(
int
i
=
0
;
i
<
columnCount
;
i
++)
{
String
columnName
=
resultSet
.
getMetaData
().
getColumn
Label
(
i
+
1
);
String
columnName
=
resultSet
.
getMetaData
().
getColumn
Name
(
i
+
1
);
// 数据处理判断
boolean
doDesensi
=
false
;
// 是否进行脱敏
boolean
doDecrypt
=
false
;
// 是否进行解密
...
...
@@ -83,7 +133,7 @@ public class CsvResultSetHelper implements ResultSetHelper {
if
(
allFields
!=
null
&&
isName
(
columnName
)){
doDesensi
=
true
;
}
if
(
allFields
==
null
&&
F
reeQueryServiceImpl
.
FILETERS_USER_NAME
.
contains
(
columnName
)){
if
(
allFields
==
null
&&
F
ilterFieldUtils
.
FILETERS_USER_NAME
.
contains
(
columnName
)){
doDesensi
=
true
;
}
}
else
if
(
this
.
filterMode
.
equals
(
CsvDataFilterMode
.
DECRYPT
))
{
// 如果需要解密
...
...
@@ -95,48 +145,62 @@ public class CsvResultSetHelper implements ResultSetHelper {
if
(
allFields
!=
null
&&
isName
(
columnName
)){
doDecrypt
=
true
;
}
if
(
allFields
==
null
&&
F
reeQueryServiceImpl
.
FILETERS_USER_NAME
.
contains
(
columnName
)){
if
(
allFields
==
null
&&
F
ilterFieldUtils
.
FILETERS_USER_NAME
.
contains
(
columnName
)){
doDecrypt
=
true
;
}
}
// 数据处理
if
(
doDesensi
)
{
// 如果需要脱敏处理
if
(
allFields
!=
null
&&
isName
(
columnName
)){
result
[
i
]
=
DecryptUtils
.
dataSecurityProcessUserName
(
DecryptUtils
.
decrypt
(
resultSet
.
getString
(
columnName
)));
}
else
if
(
allFields
==
null
&&
F
reeQueryServiceImpl
.
FILETERS_USER_NAME
.
contains
(
columnName
))
{
result
[
i
]
=
DecryptUtils
.
dataSecurityProcessUserName
(
resultSet
.
getString
(
columnName
));
result
.
add
(
DecryptUtils
.
dataSecurityProcessUserName
(
DecryptUtils
.
decrypt
(
resultSet
.
getString
(
columnName
)
)));
}
else
if
(
allFields
==
null
&&
F
ilterFieldUtils
.
FILETERS_USER_NAME
.
contains
(
columnName
))
{
result
.
add
(
DecryptUtils
.
dataSecurityProcessUserName
(
resultSet
.
getString
(
i
+
1
)
));
}
else
{
result
[
i
]
=
"******"
;
result
.
add
(
"******"
);
if
(
needDecryptByEnt
&&
isPhoneNumber
(
columnName
)){
//导出数据中需要增加加密列,先解密再加密
String
value
=
DecryptUtils
.
getInstance
().
decrypt
(
resultSet
.
getString
(
i
+
1
));
if
(
org
.
apache
.
commons
.
lang3
.
StringUtils
.
isNotBlank
(
value
)){
byte
[]
bytes
=
rsa
.
encrypt
(
StrUtil
.
bytes
(
value
,
CharsetUtil
.
UTF_8
),
KeyType
.
PublicKey
);
String
encode
=
Base64
.
encode
(
bytes
);
decyptValue
.
add
(
encode
);
}
else
{
decyptValue
.
add
(
""
);
}
}
}
}
else
if
(
doDecrypt
)
{
// 如果需要解密处理
//System.out.println("CSV 解密字段名 " + columnName);
String
tmpResult
=
resultSet
.
getString
(
columnName
);
String
tmpResult
=
resultSet
.
getString
(
i
+
1
);
if
(
tmpResult
!=
null
&&
tmpResult
.
length
()
>
0
)
{
//tmpResult = DecryptUtils.getInstance().decrypt(tmpResult);
tmpResult
=
DecryptUtils
.
getInstance
().
decrypt
(
tmpResult
);
}
// IF OVER
result
[
i
]
=
tmpResult
;
result
.
add
(
tmpResult
)
;
}
else
{
int
columnType
=
resultSet
.
getMetaData
().
getColumnType
(
i
+
1
);
switch
(
columnType
)
{
case
Types
.
TIMESTAMP
:
result
[
i
]
=
resultSet
.
getTimestamp
(
columnName
)
!=
null
?
datetimeFormatter
.
format
(
resultSet
.
getTimestamp
(
columnName
))
:
""
;
result
.
add
(
resultSet
.
getTimestamp
(
i
+
1
)
!=
null
?
datetimeFormatter
.
format
(
resultSet
.
getTimestamp
(
i
+
1
))
:
""
)
;
break
;
case
Types
.
DATE
:
//result[i] = resultSet.getTimestamp(columnName) != null ? dateFormatter.format(resultSet.getTimestamp(columnName)) : "";
result
[
i
]
=
resultSet
.
getDate
(
columnName
)
!=
null
?
dateFormatter
.
format
(
resultSet
.
getDate
(
columnName
))
:
""
;
result
.
add
(
resultSet
.
getDate
(
i
+
1
)
!=
null
?
dateFormatter
.
format
(
resultSet
.
getDate
(
i
+
1
))
:
""
)
;
break
;
case
Types
.
TIME
:
result
[
i
]
=
resultSet
.
getTimestamp
(
columnName
)
!=
null
?
timeFormatter
.
format
(
resultSet
.
getTimestamp
(
columnName
))
:
""
;
result
.
add
(
resultSet
.
getTimestamp
(
i
+
1
)
!=
null
?
timeFormatter
.
format
(
resultSet
.
getTimestamp
(
i
+
1
))
:
""
)
;
break
;
default
:
result
[
i
]
=
String
.
valueOf
(
resultSet
.
getObject
(
columnName
));
result
.
add
(
String
.
valueOf
(
resultSet
.
getObject
(
i
+
1
)
));
break
;
}
// SWITCH OVER
}
// IF ELSE OVER
}
// FOR OVER
}
// IF OVER
return
result
;
if
(
CollectionUtils
.
isNotEmpty
(
decyptValue
)){
result
.
addAll
(
decyptValue
);
}
return
result
.
toArray
(
new
String
[
0
]);
}
private
boolean
isName
(
String
columnName
){
...
...
@@ -145,11 +209,36 @@ public class CsvResultSetHelper implements ResultSetHelper {
}
for
(
FlatQueryCondition
condition
:
allFields
){
if
(
condition
.
getFieldMark
().
equals
(
columnName
)){
if
(
F
reeQueryServiceImpl
.
FILETERS_USER_NAME
.
contains
(
condition
.
getFieldName
())){
if
(
F
ilterFieldUtils
.
FILETERS_USER_NAME
.
contains
(
condition
.
getFieldName
())){
return
true
;
}
}
}
return
false
;
}
private
boolean
isPhoneNumber
(
String
columnName
){
if
(
StringUtils
.
isBlank
(
columnName
)){
return
false
;
}
for
(
FlatQueryCondition
condition
:
allFields
){
if
(
condition
.
getFieldMark
().
equals
(
columnName
)){
if
(
FilterFieldUtils
.
FILTERS_PHONE_ONLY
.
contains
(
condition
.
getFieldName
())){
return
true
;
}
}
}
return
false
;
}
public
static
void
main
(
String
[]
args
)
throws
UnsupportedEncodingException
{
RSA
rsa
=
new
RSA
();
String
value
=
"18989474184"
;
byte
[]
s
=
rsa
.
encrypt
(
StrUtil
.
bytes
(
value
,
CharsetUtil
.
UTF_8
),
KeyType
.
PublicKey
);
String
encode
=
Base64
.
encode
(
s
);
System
.
out
.
println
(
"encode = "
+
encode
);
byte
[]
decode
=
Base64
.
decode
(
encode
.
getBytes
());
String
s1
=
new
String
(
rsa
.
decrypt
(
decode
,
KeyType
.
PrivateKey
));
System
.
out
.
println
(
"s1 = "
+
s1
);
}
}
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/impl/DecryptKeyServiceImpl.java
0 → 100644
View file @
9470ea1d
package
com
.
gic
.
cloud
.
data
.
hook
.
service
.
impl
;
import
com.gic.cloud.data.hook.api.service.DecryptKeyService
;
import
com.gic.cloud.data.hook.service.dao.DhDecryptKeyMapper
;
import
com.gic.cloud.data.hook.api.dto.DhDecryptKey
;
import
org.springframework.beans.factory.annotation.Autowired
;
import
org.springframework.stereotype.Service
;
import
java.util.Calendar
;
import
java.util.Date
;
@Service("decryptKeyService")
public class DecryptKeyServiceImpl implements DecryptKeyService {

    @Autowired
    private DhDecryptKeyMapper dhDecryptKeyMapper;

    /**
     * Persists a new key pair for an enterprise. The expiry is set to the
     * last day of the current month (same time of day as now).
     *
     * @param enterpriseId enterprise the keys belong to
     * @param publicKey    Base64-encoded RSA public key
     * @param privateKey   Base64-encoded RSA private key
     */
    @Override
    public void saveDecryKey(String enterpriseId, String publicKey, String privateKey) {
        Date now = new Date();

        DhDecryptKey record = new DhDecryptKey();
        record.setEnterpriseId(enterpriseId);
        record.setPublicKey(publicKey);
        record.setPrivateKey(privateKey);
        record.setCreateTime(now);
        record.setUpdateTime(now);

        // Roll the calendar forward to the final day of this month; the key
        // expires then.
        Calendar endOfMonth = Calendar.getInstance();
        endOfMonth.set(Calendar.DAY_OF_MONTH,
                endOfMonth.getActualMaximum(Calendar.DAY_OF_MONTH));
        record.setExpireTime(endOfMonth.getTime());

        dhDecryptKeyMapper.insertSelective(record);
    }

    /**
     * Looks up the stored key pair for an enterprise.
     *
     * @param enterpriseId enterprise to look up
     * @return the stored key record, or {@code null} if none exists
     */
    @Override
    public DhDecryptKey getKeyByEnterpriseId(String enterpriseId) {
        return dhDecryptKeyMapper.selectByEnterpriseId(enterpriseId);
    }
}
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/impl/FlatQueryResultServiceImpl.java
View file @
9470ea1d
...
...
@@ -7,6 +7,7 @@ import com.ctrip.framework.apollo.ConfigService;
import
com.gic.cloud.common.api.base.Page
;
import
com.gic.cloud.data.hook.api.dto.*
;
import
com.gic.cloud.data.hook.api.entity.*
;
import
com.gic.cloud.data.hook.api.service.DecryptKeyService
;
import
com.gic.cloud.data.hook.api.service.IDownloadTaskService
;
import
com.gic.cloud.data.hook.api.service.IFlatQueryResultService
;
import
com.gic.cloud.data.hook.service.*
;
...
...
@@ -14,6 +15,8 @@ import com.gic.cloud.data.hook.service.dao.FlatQueryTableDao;
import
com.gic.cloud.data.hook.service.entity.CsvDataFilterMode
;
import
com.gic.cloud.data.hook.service.entity.CsvResultSetHelper
;
import
com.gic.qcloud.BucketNameEnum
;
import
com.gic.qcloud.FileUploadUtil
;
import
com.google.common.collect.Lists
;
import
com.opencsv.CSVWriter
;
import
com.opencsv.ResultSetHelper
;
...
...
@@ -65,6 +68,8 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
FlatQueryTableDao
flatQueryTableDao
;
@Autowired
IDownloadTaskService
downloadTaskService
;
@Autowired
DecryptKeyService
decryptKeyService
;
/** 自助指标查询关联的下载条件列表 */
protected
List
<
FlatQueryTaskCondition
>
taskConditions
=
Lists
.
newArrayList
();
...
...
@@ -274,19 +279,19 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
if
(
queryDataType
==
QueryDataType
.
FULL
){
String
preResult
=
rs
.
getString
(
fieldColumnIndex
);
if
(
dataPermission
==
1
&&
F
reeQueryServiceImpl
.
FILTERS_PHONE_ONLY
.
contains
(
fieldName
)){
if
(
dataPermission
==
1
&&
F
ilterFieldUtils
.
FILTERS_PHONE_ONLY
.
contains
(
fieldName
)){
fieldResult
=
DecryptUtils
.
decrypt
(
preResult
);
}
else
if
(
dataPermission
==
2
&&
F
reeQueryServiceImpl
.
FILTERS_PHONE_AND_CARD
.
contains
(
fieldName
)){
}
else
if
(
dataPermission
==
2
&&
F
ilterFieldUtils
.
FILTERS_PHONE_AND_CARD
.
contains
(
fieldName
)){
fieldResult
=
DecryptUtils
.
decrypt
(
preResult
);
}
else
if
(
F
reeQueryServiceImpl
.
FILETERS_USER_NAME
.
contains
(
fieldName
)){
}
else
if
(
F
ilterFieldUtils
.
FILETERS_USER_NAME
.
contains
(
fieldName
)){
fieldResult
=
DecryptUtils
.
decrypt
(
preResult
);
}
else
fieldResult
=
rs
.
getObject
(
fieldColumnIndex
);
}
else
{
if
(
dataPermission
==
1
&&
F
reeQueryServiceImpl
.
FILTERS_PHONE_ONLY
.
contains
(
fieldName
)){
if
(
dataPermission
==
1
&&
F
ilterFieldUtils
.
FILTERS_PHONE_ONLY
.
contains
(
fieldName
)){
fieldResult
=
"******"
;
}
else
if
(
dataPermission
==
2
&&
F
reeQueryServiceImpl
.
FILTERS_PHONE_AND_CARD
.
contains
(
fieldName
)){
}
else
if
(
dataPermission
==
2
&&
F
ilterFieldUtils
.
FILTERS_PHONE_AND_CARD
.
contains
(
fieldName
)){
fieldResult
=
"******"
;
}
else
if
(
F
reeQueryServiceImpl
.
FILETERS_USER_NAME
.
contains
(
fieldName
)){
}
else
if
(
F
ilterFieldUtils
.
FILETERS_USER_NAME
.
contains
(
fieldName
)){
fieldResult
=
DecryptUtils
.
dataSecurityProcessUserName
(
DecryptUtils
.
decrypt
(
rs
.
getObject
(
fieldColumnIndex
)+
""
));
}
else
fieldResult
=
rs
.
getObject
(
fieldColumnIndex
);
}
...
...
@@ -543,166 +548,7 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
break
;
}
// IF OVER
}
// FOR OVER
if
(
condition
!=
null
)
{
// 更新任务状态
DownloadTask
task
=
DownloadTaskServiceImpl
.
getInstance
().
getDownloadTaskById
(
condition
.
getTaskId
());
task
.
setStatus
(
DownloadTaskStatus
.
BUILDING
);
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
log
.
debug
(
"runDownloadTask.run"
,
"自助指标下载任务执行:"
+
task
.
getId
());
String
fullQuery
=
buildFlatQuerySQL
(
false
,
// 下载用途
condition
.
getTableId
(),
condition
.
getEnterpriseIds
(),
condition
.
getConditions
(),
condition
.
getOrderField
(),
condition
.
getOrderDir
(),
condition
.
getExecDistinct
(),
0
,
condition
.
getAuthStoreIdList
());
Connection
conn
=
HiveHelper
.
getDownloadHiveConnection
();
log
.
debug
(
"runDownloadTask.run"
,
"获取商户连接:"
+
task
.
getId
());
if
(
conn
!=
null
)
{
try
{
Statement
stat
=
conn
.
createStatement
();
// stat.setQueryTimeout(60 * 1000);
stat
.
execute
(
"REFRESH TABLE "
+
condition
.
getTableId
());
// 强制刷新表结构
ResultSet
rs
=
stat
.
executeQuery
(
fullQuery
);
// 生成指定格式下载元文件
String
originalFilePath
=
""
;
if
(
task
.
getFormat
().
equals
(
DownloadFileFormat
.
CSV
))
{
// 如果指定为 CSV 格式
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".csv"
);
originalFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".csv"
;
File
tmp
=
new
File
(
originalFilePath
);
if
(
tmp
.
exists
())
{
// 删除可能存在的文件
tmp
.
delete
();
}
//CSVWriter csvWriter = new CSVWriter(new FileWriter(csvPath), '\t');
OutputStreamWriter
out
=
new
OutputStreamWriter
(
new
FileOutputStream
(
originalFilePath
),
Charset
.
forName
(
"GBK"
));
ResultSetHelper
helper
=
new
CsvResultSetHelper
(
task
.
getQueryDataType
()
==
QueryDataType
.
FULL
?
CsvDataFilterMode
.
DECRYPT
:
CsvDataFilterMode
.
DESENSI
,
condition
.
getDecryptFilters
(),
condition
.
getConditions
());
CSVWriter
writer
=
new
CSVWriter
(
out
,
','
);
writer
.
setResultService
(
helper
);
writer
.
writeAll
(
rs
,
true
);
writer
.
close
();
out
.
close
();
//记得关闭资源
log
.
debug
(
"runDownloadTask.run"
,
"已生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".csv"
);
}
else
{
// 如果指定为 XLS 格式
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".xlsx"
);
originalFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".xlsx"
;
SXSSFWorkbook
wb
=
new
SXSSFWorkbook
(
100
);
// 内存中保留 100 行
Sheet
sheet
=
wb
.
createSheet
();
Row
row
=
sheet
.
createRow
(
0
);
Cell
cell
;
for
(
int
j
=
0
;
j
<
rs
.
getMetaData
().
getColumnCount
();
++
j
)
{
// 遍历创建表头
String
colName
=
rs
.
getMetaData
().
getColumnLabel
(
j
+
1
);
cell
=
row
.
createCell
(
j
);
cell
.
setCellValue
(
colName
);
}
// 遍历输出行
int
rowCount
=
0
;
while
(
rs
.
next
())
{
rowCount
++;
row
=
sheet
.
createRow
(
rowCount
);
for
(
int
j
=
0
;
j
<
rs
.
getMetaData
().
getColumnCount
();
++
j
)
{
//String c = rs.getString(j + 1);
//row.createCell(j).setCellValue(c);
String
cName
=
rs
.
getMetaData
().
getColumnName
(
j
+
1
);
List
<
String
>
cFilters
=
condition
.
getDecryptFilters
();
if
(
task
.
getQueryDataType
()
==
QueryDataType
.
FULL
&&
(
cFilters
.
contains
(
cName
)
||
DecryptUtils
.
isName
(
cName
,
condition
.
getConditions
())))
{
String
tmpResult
=
rs
.
getString
(
j
+
1
);
if
(
StringUtils
.
isNotBlank
(
tmpResult
))
tmpResult
=
DecryptUtils
.
getInstance
().
decrypt
(
tmpResult
);
row
.
createCell
(
j
).
setCellValue
(
tmpResult
);
}
else
{
int
cType
=
rs
.
getMetaData
().
getColumnType
(
j
+
1
);
switch
(
cType
)
{
case
Types
.
TIMESTAMP
:
row
.
createCell
(
j
).
setCellValue
(
rs
.
getTimestamp
(
j
+
1
)
!=
null
?
datetimeFormatter
.
format
(
rs
.
getTimestamp
(
j
+
1
))
:
""
);
break
;
case
Types
.
DATE
:
row
.
createCell
(
j
).
setCellValue
(
rs
.
getDate
(
j
+
1
)
!=
null
?
dateFormatter
.
format
(
rs
.
getDate
(
j
+
1
))
:
""
);
break
;
case
Types
.
TIME
:
row
.
createCell
(
j
).
setCellValue
(
rs
.
getTimestamp
(
j
+
1
)
!=
null
?
timeFormatter
.
format
(
rs
.
getTimestamp
(
j
+
1
))
:
""
);
break
;
default
:
if
(
cFilters
.
contains
(
cName
)){
row
.
createCell
(
j
).
setCellValue
(
"******"
);
}
else
if
(
DecryptUtils
.
isName
(
cName
,
condition
.
getConditions
())){
row
.
createCell
(
j
).
setCellValue
(
DecryptUtils
.
dataSecurityProcessUserName
(
DecryptUtils
.
decrypt
(
rs
.
getString
(
j
+
1
))));
}
else
{
row
.
createCell
(
j
).
setCellValue
(
rs
.
getString
(
j
+
1
));
}
break
;
}
}
// IF ELSE OVER
}
// FOR OVER
}
// WHILE OVER
FileOutputStream
fileOut
=
new
FileOutputStream
(
originalFilePath
);
wb
.
write
(
fileOut
);
//fileOut.flush(); // SXSSFWorkbook 使用 auto-flush 模式
fileOut
.
close
();
//wb.close();
wb
.
dispose
();
// SXSSFWorkbook 没有 close 方法
log
.
debug
(
"runDownloadTask.run"
,
"已生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".xlsx"
);
}
// IF ELSE OVER
// 如果指定压缩,则使用之
//if (task.getFormat().equals("zip")) {
if
(
task
.
getUseCompress
().
equals
(
Global
.
YES
))
{
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自助指标压缩文件 "
+
condition
.
getTaskId
()
+
".zip"
);
String
zipFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".zip"
;
File
zipFile
=
new
File
(
zipFilePath
);
ZipOutputStream
zos
=
null
;
byte
[]
buf
=
new
byte
[
1024
];
int
length
=
0
;
try
{
OutputStream
os
=
new
FileOutputStream
(
zipFilePath
);
BufferedOutputStream
bos
=
new
BufferedOutputStream
(
os
);
zos
=
new
ZipOutputStream
(
bos
);
zos
.
setLevel
(
6
);
// 压缩率选择 0-9
InputStream
is
=
new
FileInputStream
(
originalFilePath
);
BufferedInputStream
bis
=
new
BufferedInputStream
(
is
);
zos
.
putNextEntry
(
new
ZipEntry
(
originalFilePath
.
substring
(
originalFilePath
.
lastIndexOf
(
"/"
)
+
1
)));
while
((
length
=
bis
.
read
(
buf
))
>
0
)
{
zos
.
write
(
buf
,
0
,
length
);
}
bis
.
close
();
is
.
close
();
//bos.close();
//os.close();
log
.
debug
(
"runDownloadTask.run"
,
"已生成自助指标压缩文件 "
+
condition
.
getTaskId
()
+
".zip"
);
}
catch
(
Exception
ex2
)
{
throw
ex2
;
}
finally
{
zos
.
closeEntry
();
zos
.
close
();
}
}
task
.
setStatus
(
DownloadTaskStatus
.
COMPLISHED
);
task
.
setOverTime
(
new
Date
());
String
taskFileExt
=
task
.
getUseCompress
().
equals
(
Global
.
YES
)
?
".zip"
:
task
.
getFormat
().
equals
(
DownloadFileFormat
.
CSV
)
?
".csv"
:
".xlsx"
;
task
.
setFilePath
(
task
.
getId
()
+
taskFileExt
);
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
}
catch
(
Exception
ex
)
{
ex
.
printStackTrace
();
// 标记任务异常
task
.
setStatus
(
DownloadTaskStatus
.
ERROR
);
task
.
setOverTime
(
new
Date
());
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
}
finally
{
try
{
conn
.
close
();
}
catch
(
SQLException
e
)
{
e
.
printStackTrace
();
}
}
}
// IF OVER
}
// IF OVER
takeFile
(
condition
);
}
// 没有任务则忽略
}
catch
(
Exception
e
){
log
.
debug
(
"自助指标下载异常"
,
e
.
getMessage
());
...
...
@@ -735,165 +581,7 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
break
;
}
// IF OVER
}
// FOR OVER
if
(
condition
!=
null
)
{
// 更新任务状态
DownloadTask
task
=
DownloadTaskServiceImpl
.
getInstance
().
getDownloadTaskById
(
condition
.
getTaskId
());
task
.
setStatus
(
DownloadTaskStatus
.
BUILDING
);
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
log
.
debug
(
"runDownloadTask.run"
,
"自助指标下载任务执行:"
+
task
.
getId
());
String
fullQuery
=
buildFlatQuerySQL
(
false
,
// 下载用途
condition
.
getTableId
(),
condition
.
getEnterpriseIds
(),
condition
.
getConditions
(),
condition
.
getOrderField
(),
condition
.
getOrderDir
(),
condition
.
getExecDistinct
(),
0
,
condition
.
getAuthStoreIdList
());
Connection
conn
=
HiveHelper
.
getBalaDownloadHiveConnection
();
if
(
conn
!=
null
)
{
try
{
Statement
stat
=
conn
.
createStatement
();
// stat.setQueryTimeout(60 * 1000);
stat
.
execute
(
"REFRESH TABLE "
+
condition
.
getTableId
());
// 强制刷新表结构
ResultSet
rs
=
stat
.
executeQuery
(
fullQuery
);
// 生成指定格式下载元文件
String
originalFilePath
=
""
;
if
(
task
.
getFormat
().
equals
(
DownloadFileFormat
.
CSV
))
{
// 如果指定为 CSV 格式
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".csv"
);
originalFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".csv"
;
File
tmp
=
new
File
(
originalFilePath
);
if
(
tmp
.
exists
())
{
// 删除可能存在的文件
tmp
.
delete
();
}
//CSVWriter csvWriter = new CSVWriter(new FileWriter(csvPath), '\t');
OutputStreamWriter
out
=
new
OutputStreamWriter
(
new
FileOutputStream
(
originalFilePath
),
Charset
.
forName
(
"GBK"
));
ResultSetHelper
helper
=
new
CsvResultSetHelper
(
task
.
getQueryDataType
()
==
QueryDataType
.
FULL
?
CsvDataFilterMode
.
DECRYPT
:
CsvDataFilterMode
.
DESENSI
,
condition
.
getDecryptFilters
(),
condition
.
getConditions
());
CSVWriter
writer
=
new
CSVWriter
(
out
,
','
);
writer
.
setResultService
(
helper
);
writer
.
writeAll
(
rs
,
true
);
writer
.
close
();
out
.
close
();
//记得关闭资源
log
.
debug
(
"runDownloadTask.run"
,
"已生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".csv"
);
}
else
{
// 如果指定为 XLS 格式
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".xlsx"
);
originalFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".xlsx"
;
SXSSFWorkbook
wb
=
new
SXSSFWorkbook
(
100
);
// 内存中保留 100 行
Sheet
sheet
=
wb
.
createSheet
();
Row
row
=
sheet
.
createRow
(
0
);
Cell
cell
;
for
(
int
j
=
0
;
j
<
rs
.
getMetaData
().
getColumnCount
();
++
j
)
{
// 遍历创建表头
String
colName
=
rs
.
getMetaData
().
getColumnLabel
(
j
+
1
);
cell
=
row
.
createCell
(
j
);
cell
.
setCellValue
(
colName
);
}
// 遍历输出行
int
rowCount
=
0
;
while
(
rs
.
next
())
{
rowCount
++;
row
=
sheet
.
createRow
(
rowCount
);
for
(
int
j
=
0
;
j
<
rs
.
getMetaData
().
getColumnCount
();
++
j
)
{
//String c = rs.getString(j + 1);
//row.createCell(j).setCellValue(c);
String
cName
=
rs
.
getMetaData
().
getColumnName
(
j
+
1
);
List
<
String
>
cFilters
=
condition
.
getDecryptFilters
();
if
(
task
.
getQueryDataType
()
==
QueryDataType
.
FULL
&&
(
cFilters
.
contains
(
cName
)
||
DecryptUtils
.
isName
(
cName
,
condition
.
getConditions
())))
{
String
tmpResult
=
rs
.
getString
(
j
+
1
);
if
(
StringUtils
.
isNotBlank
(
tmpResult
))
tmpResult
=
DecryptUtils
.
getInstance
().
decrypt
(
tmpResult
);
row
.
createCell
(
j
).
setCellValue
(
tmpResult
);
}
else
{
int
cType
=
rs
.
getMetaData
().
getColumnType
(
j
+
1
);
switch
(
cType
)
{
case
Types
.
TIMESTAMP
:
row
.
createCell
(
j
).
setCellValue
(
rs
.
getTimestamp
(
j
+
1
)
!=
null
?
datetimeFormatter
.
format
(
rs
.
getTimestamp
(
j
+
1
))
:
""
);
break
;
case
Types
.
DATE
:
row
.
createCell
(
j
).
setCellValue
(
rs
.
getDate
(
j
+
1
)
!=
null
?
dateFormatter
.
format
(
rs
.
getDate
(
j
+
1
))
:
""
);
break
;
case
Types
.
TIME
:
row
.
createCell
(
j
).
setCellValue
(
rs
.
getTimestamp
(
j
+
1
)
!=
null
?
timeFormatter
.
format
(
rs
.
getTimestamp
(
j
+
1
))
:
""
);
break
;
default
:
if
(
cFilters
.
contains
(
cName
)){
row
.
createCell
(
j
).
setCellValue
(
"******"
);
}
else
if
(
DecryptUtils
.
isName
(
cName
,
condition
.
getConditions
())){
row
.
createCell
(
j
).
setCellValue
(
DecryptUtils
.
dataSecurityProcessUserName
(
DecryptUtils
.
decrypt
(
rs
.
getString
(
j
+
1
))));
}
else
{
row
.
createCell
(
j
).
setCellValue
(
rs
.
getString
(
j
+
1
));
}
break
;
}
}
// IF ELSE OVER
}
// FOR OVER
}
// WHILE OVER
FileOutputStream
fileOut
=
new
FileOutputStream
(
originalFilePath
);
wb
.
write
(
fileOut
);
//fileOut.flush(); // SXSSFWorkbook 使用 auto-flush 模式
fileOut
.
close
();
//wb.close();
wb
.
dispose
();
// SXSSFWorkbook 没有 close 方法
log
.
debug
(
"runDownloadTask.run"
,
"已生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".xlsx"
);
}
// IF ELSE OVER
// 如果指定压缩,则使用之
//if (task.getFormat().equals("zip")) {
if
(
task
.
getUseCompress
().
equals
(
Global
.
YES
))
{
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自助指标压缩文件 "
+
condition
.
getTaskId
()
+
".zip"
);
String
zipFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".zip"
;
File
zipFile
=
new
File
(
zipFilePath
);
ZipOutputStream
zos
=
null
;
byte
[]
buf
=
new
byte
[
1024
];
int
length
=
0
;
try
{
OutputStream
os
=
new
FileOutputStream
(
zipFilePath
);
BufferedOutputStream
bos
=
new
BufferedOutputStream
(
os
);
zos
=
new
ZipOutputStream
(
bos
);
zos
.
setLevel
(
6
);
// 压缩率选择 0-9
InputStream
is
=
new
FileInputStream
(
originalFilePath
);
BufferedInputStream
bis
=
new
BufferedInputStream
(
is
);
zos
.
putNextEntry
(
new
ZipEntry
(
originalFilePath
.
substring
(
originalFilePath
.
lastIndexOf
(
"/"
)
+
1
)));
while
((
length
=
bis
.
read
(
buf
))
>
0
)
{
zos
.
write
(
buf
,
0
,
length
);
}
bis
.
close
();
is
.
close
();
//bos.close();
//os.close();
log
.
debug
(
"runDownloadTask.run"
,
"已生成自助指标压缩文件 "
+
condition
.
getTaskId
()
+
".zip"
);
}
catch
(
Exception
ex2
)
{
throw
ex2
;
}
finally
{
zos
.
closeEntry
();
zos
.
close
();
}
}
task
.
setStatus
(
DownloadTaskStatus
.
COMPLISHED
);
task
.
setOverTime
(
new
Date
());
String
taskFileExt
=
task
.
getUseCompress
().
equals
(
Global
.
YES
)
?
".zip"
:
task
.
getFormat
().
equals
(
DownloadFileFormat
.
CSV
)
?
".csv"
:
".xlsx"
;
task
.
setFilePath
(
task
.
getId
()
+
taskFileExt
);
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
}
catch
(
Exception
ex
)
{
ex
.
printStackTrace
();
// 标记任务异常
task
.
setStatus
(
DownloadTaskStatus
.
ERROR
);
task
.
setOverTime
(
new
Date
());
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
}
finally
{
try
{
conn
.
close
();
}
catch
(
SQLException
e
)
{
e
.
printStackTrace
();
}
}
}
// IF OVER
}
// IF OVER
takeFile
(
condition
);
}
// 没有任务则忽略
}
catch
(
Exception
e
){
log
.
debug
(
"自助指标下载异常"
,
e
.
getMessage
());
...
...
@@ -903,6 +591,146 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
},
interval
*
1000
,
interval
*
1000
,
TimeUnit
.
MILLISECONDS
);
}
private
void
takeFile
(
FlatQueryTaskCondition
condition
){
if
(
condition
!=
null
)
{
// 更新任务状态
DownloadTask
task
=
DownloadTaskServiceImpl
.
getInstance
().
getDownloadTaskById
(
condition
.
getTaskId
());
task
.
setStatus
(
DownloadTaskStatus
.
BUILDING
);
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
log
.
debug
(
"runDownloadTask.run"
,
"自助指标下载任务执行:"
+
task
.
getId
());
String
fullQuery
=
buildFlatQuerySQL
(
false
,
// 下载用途
condition
.
getTableId
(),
condition
.
getEnterpriseIds
(),
condition
.
getConditions
(),
condition
.
getOrderField
(),
condition
.
getOrderDir
(),
condition
.
getExecDistinct
(),
0
,
condition
.
getAuthStoreIdList
());
Connection
conn
=
HiveHelper
.
getDownloadHiveConnection
();
log
.
debug
(
"runDownloadTask.run"
,
"获取商户连接:"
+
task
.
getId
());
if
(
conn
!=
null
)
{
try
{
Statement
stat
=
conn
.
createStatement
();
// stat.setQueryTimeout(60 * 1000);
stat
.
execute
(
"REFRESH TABLE "
+
condition
.
getTableId
());
// 强制刷新表结构
ResultSet
rs
=
stat
.
executeQuery
(
fullQuery
);
String
queryDataType
=
task
.
getQueryDataType
()
==
QueryDataType
.
FULL
?
CsvDataFilterMode
.
DECRYPT
:
CsvDataFilterMode
.
DESENSI
;
ResultSetHelper
helper
=
new
CsvResultSetHelper
(
queryDataType
,
condition
.
getDecryptFilters
(),
condition
.
getConditions
(),
condition
.
getEnterpriseIds
().
get
(
0
),
decryptKeyService
);
// 生成指定格式下载元文件
String
originalFilePath
=
""
;
if
(
task
.
getFormat
().
equals
(
DownloadFileFormat
.
CSV
))
{
// 如果指定为 CSV 格式
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".csv"
);
originalFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".csv"
;
File
tmp
=
new
File
(
originalFilePath
);
if
(
tmp
.
exists
())
{
// 删除可能存在的文件
tmp
.
delete
();
}
//CSVWriter csvWriter = new CSVWriter(new FileWriter(csvPath), '\t');
OutputStreamWriter
out
=
new
OutputStreamWriter
(
new
FileOutputStream
(
originalFilePath
),
Charset
.
forName
(
"GBK"
));
CSVWriter
writer
=
new
CSVWriter
(
out
,
','
);
writer
.
setResultService
(
helper
);
writer
.
writeAll
(
rs
,
true
);
writer
.
close
();
out
.
close
();
//记得关闭资源
log
.
debug
(
"runDownloadTask.run"
,
"已生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".csv"
);
}
else
{
// 如果指定为 XLS 格式
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".xlsx"
);
originalFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".xlsx"
;
SXSSFWorkbook
wb
=
new
SXSSFWorkbook
(
100
);
// 内存中保留 100 行
Sheet
sheet
=
wb
.
createSheet
();
Row
row
=
sheet
.
createRow
(
0
);
Cell
cell
;
String
[]
columnNames
=
helper
.
getColumnNames
(
rs
);
for
(
int
j
=
0
;
j
<
columnNames
.
length
;
j
++){
cell
=
row
.
createCell
(
j
);
cell
.
setCellValue
(
columnNames
[
j
]);
}
// 遍历输出行
int
rowCount
=
0
;
while
(
rs
.
next
())
{
rowCount
++;
row
=
sheet
.
createRow
(
rowCount
);
String
[]
columnValues
=
helper
.
getColumnValues
(
rs
,
true
,
""
,
""
);
for
(
int
j
=
0
;
j
<
columnValues
.
length
;
j
++){
row
.
createCell
(
j
).
setCellValue
(
columnValues
[
j
]);
}
}
// WHILE OVER
FileOutputStream
fileOut
=
new
FileOutputStream
(
originalFilePath
);
wb
.
write
(
fileOut
);
//fileOut.flush(); // SXSSFWorkbook 使用 auto-flush 模式
fileOut
.
close
();
//wb.close();
wb
.
dispose
();
// SXSSFWorkbook 没有 close 方法
log
.
debug
(
"runDownloadTask.run"
,
"已生成自助指标下载文件 "
+
condition
.
getTaskId
()
+
".xlsx"
);
}
// IF ELSE OVER
String
cloudFileUrl
=
"https://"
;
// 如果指定压缩,则使用之
//if (task.getFormat().equals("zip")) {
String
taskFileExt
=
task
.
getUseCompress
().
equals
(
Global
.
YES
)
?
".zip"
:
task
.
getFormat
().
equals
(
DownloadFileFormat
.
CSV
)
?
".csv"
:
".xlsx"
;
if
(
task
.
getUseCompress
().
equals
(
Global
.
YES
))
{
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自助指标压缩文件 "
+
condition
.
getTaskId
()
+
".zip"
);
String
zipFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".zip"
;
File
zipFile
=
new
File
(
zipFilePath
);
ZipOutputStream
zos
=
null
;
byte
[]
buf
=
new
byte
[
1024
];
int
length
=
0
;
try
{
OutputStream
os
=
new
FileOutputStream
(
zipFilePath
);
BufferedOutputStream
bos
=
new
BufferedOutputStream
(
os
);
zos
=
new
ZipOutputStream
(
bos
);
zos
.
setLevel
(
6
);
// 压缩率选择 0-9
InputStream
is
=
new
FileInputStream
(
originalFilePath
);
BufferedInputStream
bis
=
new
BufferedInputStream
(
is
);
zos
.
putNextEntry
(
new
ZipEntry
(
originalFilePath
.
substring
(
originalFilePath
.
lastIndexOf
(
"/"
)
+
1
)));
while
((
length
=
bis
.
read
(
buf
))
>
0
)
{
zos
.
write
(
buf
,
0
,
length
);
}
bis
.
close
();
is
.
close
();
//bos.close();
//os.close();
log
.
debug
(
"runDownloadTask.run"
,
"已生成自助指标压缩文件 "
+
condition
.
getTaskId
()
+
".zip"
);
}
catch
(
Exception
ex2
)
{
throw
ex2
;
}
finally
{
zos
.
closeEntry
();
zos
.
close
();
}
log
.
debug
(
"开始上传压缩文件到腾讯云"
,
task
.
getId
());
cloudFileUrl
+=
FileUploadUtil
.
simpleUploadFileFromLocal
(
zipFile
,
task
.
getName
()
+
"-"
+
task
.
getId
()+
taskFileExt
,
BucketNameEnum
.
COMPRESS_60000
.
getName
());
}
else
{
log
.
debug
(
"开始上传文件到腾讯云"
,
task
.
getId
());
cloudFileUrl
+=
FileUploadUtil
.
simpleUploadFileFromLocal
(
new
File
(
originalFilePath
),
task
.
getName
()
+
"-"
+
task
.
getId
()+
taskFileExt
,
BucketNameEnum
.
REPORT_50000
.
getName
());
}
log
.
debug
(
"上传腾讯云"
,
"地址为:"
+
cloudFileUrl
);
task
.
setStatus
(
DownloadTaskStatus
.
COMPLISHED
);
task
.
setOverTime
(
new
Date
());
task
.
setFilePath
(
cloudFileUrl
);
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
}
catch
(
Exception
ex
)
{
ex
.
printStackTrace
();
// 标记任务异常
task
.
setStatus
(
DownloadTaskStatus
.
ERROR
);
task
.
setOverTime
(
new
Date
());
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
}
finally
{
try
{
conn
.
close
();
}
catch
(
SQLException
e
)
{
e
.
printStackTrace
();
}
}
}
// IF OVER
}
// IF OVER
}
/** 下载申请检查计时器 */
//private Timer applyTimer = new Timer();
...
...
gic-cloud-data-hook-service/src/main/java/com/gic/cloud/data/hook/service/impl/FreeQueryServiceImpl.java
View file @
9470ea1d
...
...
@@ -14,6 +14,8 @@ import com.gic.cloud.data.hook.service.dao.FreeQueryRecordDao;
import
com.gic.cloud.data.hook.service.dao.FreeQuerySourceDao
;
import
com.gic.cloud.data.hook.service.entity.CsvDataFilterMode
;
import
com.gic.cloud.data.hook.service.entity.CsvResultSetHelper
;
import
com.gic.qcloud.BucketNameEnum
;
import
com.gic.qcloud.FileUploadUtil
;
import
com.google.common.collect.Lists
;
import
com.opencsv.CSVWriter
;
import
com.opencsv.ResultSetHelper
;
...
...
@@ -52,14 +54,6 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
/** csv / xls 下载目录 */
public
static
final
String
SAVE_FOLDER
=
"/usr/local/data-hook-file"
;
/** 脱敏字段 */
public
static
final
List
<
String
>
FILTERS_PHONE_ONLY
=
Arrays
.
asList
(
"mobile"
,
"phone"
,
"enterprise_name"
,
"phone_number"
,
"receive_phone_number"
,
"use_phone_number"
,
"friend_phone_num"
,
"from_phone_num"
);
/** 脱敏字段 */
public
static
final
List
<
String
>
FILTERS_PHONE_AND_CARD
=
Arrays
.
asList
(
"card_num"
,
"mobile"
,
"phone"
,
"enterprise_name"
,
"phone_number"
,
"receive_phone_number"
,
"receive_card_num"
,
"use_phone_number"
,
"use_card_num"
,
"friend_card_num"
,
"from_card_num"
,
"friend_phone_num"
,
"from_phone_num"
);
public
static
final
List
<
String
>
FILETERS_USER_NAME
=
Arrays
.
asList
(
"member_name"
,
"children_name"
,
"mbr_name"
,
"receive_member_name"
,
"use_member_name"
,
"name"
,
"bb_name"
,
"friend_mbr_name"
,
"from_mbr_name"
);
@Autowired
IDownloadTaskService
downloadTaskService
;
...
...
@@ -70,8 +64,8 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
*/
public
static
List
<
String
>
getFieldsFilters
(
Integer
desensiType
)
{
if
(
desensiType
==
DataDesensiType
.
PHONE_ONLY
)
{
return
FILTERS_PHONE_ONLY
;
}
else
return
FILTERS_PHONE_AND_CARD
;
return
F
ilterFieldUtils
.
F
ILTERS_PHONE_ONLY
;
}
else
return
F
ilterFieldUtils
.
F
ILTERS_PHONE_AND_CARD
;
}
/** 是否为脱敏字段 */
...
...
@@ -79,7 +73,7 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
List
<
String
>
fieldsFilter
=
getFieldsFilters
(
desensiType
);
log
.
debug
(
"isFilterFields:"
,
JSON
.
toJSONString
(
fieldsFilter
)
+
"-"
+
desensiType
+
"-"
+
fieldName
);
for
(
String
filter
:
fieldsFilter
)
{
if
(
fieldName
.
contain
s
(
filter
))
return
true
;
if
(
fieldName
.
equal
s
(
filter
))
return
true
;
}
// FOR OVER
return
false
;
}
...
...
@@ -236,8 +230,8 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
Object
filedValue
;
if
(
queryDataType
==
QueryDataType
.
SAFE
&&
FreeQueryServiceImpl
.
isFilterFields
(
desensiType
,
fieldName
))
{
//手机号和卡号
filedValue
=
"******"
;
}
else
if
(
queryDataType
==
QueryDataType
.
SAFE
&&
FILETERS_USER_NAME
.
contains
(
fieldName
)){
//用户名
filedValue
=
DecryptUtils
.
dataSecurityProcessUserName
(
rs
.
getString
(
fieldName
));
}
else
if
(
queryDataType
==
QueryDataType
.
SAFE
&&
F
ilterFieldUtils
.
F
ILETERS_USER_NAME
.
contains
(
fieldName
)){
//用户名
filedValue
=
DecryptUtils
.
dataSecurityProcessUserName
(
rs
.
getString
(
i
+
1
));
}
else
{
switch
(
metaData
.
getColumnType
(
i
+
1
))
{
case
Types
.
TIMESTAMP
:
...
...
@@ -250,7 +244,7 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
filedValue
=
rs
.
getTimestamp
(
fieldName
)
!=
null
?
timeFormat
.
format
(
new
Date
(
rs
.
getTimestamp
(
fieldName
).
getTime
()))
:
""
;
break
;
default
:
filedValue
=
rs
.
getObject
(
fieldName
);
filedValue
=
rs
.
getObject
(
i
+
1
);
}
// SWITCH OVER
}
// IF ELSE
result
.
add
(
fieldName
,
filedValue
);
...
...
@@ -356,6 +350,21 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
}
}
@Override
public
List
<
String
>
listFilterPhoneField
()
{
return
FilterFieldUtils
.
FILTERS_PHONE_ONLY
;
}
@Override
public
List
<
String
>
listFilterPhoneAndCardNoField
()
{
return
FilterFieldUtils
.
FILTERS_PHONE_AND_CARD
;
}
@Override
public
List
<
String
>
listFilterUserName
()
{
return
FilterFieldUtils
.
FILETERS_USER_NAME
;
}
private
static
SimpleDateFormat
datetimeFormatter
=
new
SimpleDateFormat
(
"yyyy-MM-dd HH:mm:ss"
);
private
static
SimpleDateFormat
dateFormatter
=
new
SimpleDateFormat
(
"yyyy-MM-dd"
);
...
...
@@ -412,7 +421,8 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
//CSVWriter csvWriter = new CSVWriter(new FileWriter(csvPath), '\t');
OutputStreamWriter
out
=
new
OutputStreamWriter
(
new
FileOutputStream
(
originalFilePath
),
Charset
.
forName
(
"GBK"
));
ResultSetHelper
helper
=
new
CsvResultSetHelper
(
task
.
getQueryDataType
()
==
QueryDataType
.
FULL
?
CsvDataFilterMode
.
NONE
:
CsvDataFilterMode
.
DESENSI
,
FreeQueryServiceImpl
.
getFieldsFilters
(
condition
.
getDesensiType
()),
null
);
String
queryDataType
=
task
.
getQueryDataType
()
==
QueryDataType
.
FULL
?
CsvDataFilterMode
.
NONE
:
CsvDataFilterMode
.
DESENSI
;
ResultSetHelper
helper
=
new
CsvResultSetHelper
(
queryDataType
,
FreeQueryServiceImpl
.
getFieldsFilters
(
condition
.
getDesensiType
()),
null
,
null
,
null
);
CSVWriter
writer
=
new
CSVWriter
(
out
,
','
);
writer
.
setResultService
(
helper
);
writer
.
writeAll
(
rs
,
true
);
...
...
@@ -441,8 +451,8 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
//row.createCell(j).setCellValue(c);
String
cName
=
rs
.
getMetaData
().
getColumnName
(
j
+
1
);
if
(
task
.
getQueryDataType
()
==
QueryDataType
.
SAFE
&&
(
FreeQueryServiceImpl
.
isFilterFields
(
condition
.
getDesensiType
(),
cName
)
||
F
reeQueryServiceImpl
.
FILETERS_USER_NAME
.
contains
(
cName
)))
{
if
(
F
reeQueryServiceImpl
.
FILETERS_USER_NAME
.
contains
(
cName
)){
&&
(
FreeQueryServiceImpl
.
isFilterFields
(
condition
.
getDesensiType
(),
cName
)
||
F
ilterFieldUtils
.
FILETERS_USER_NAME
.
contains
(
cName
)))
{
if
(
F
ilterFieldUtils
.
FILETERS_USER_NAME
.
contains
(
cName
)){
row
.
createCell
(
j
).
setCellValue
(
DecryptUtils
.
dataSecurityProcessUserName
(
rs
.
getString
(
j
+
1
)));
}
else
{
row
.
createCell
(
j
).
setCellValue
(
"******"
);
...
...
@@ -474,7 +484,7 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
wb
.
dispose
();
// SXSSFWorkbook 没有 close 方法
log
.
debug
(
"runDownloadTask.run"
,
"已生成自定义查询下载文件 "
+
condition
.
getTaskId
()
+
".xlsx"
);
}
String
cloudFileUrl
=
""
;
if
(
task
.
getUseCompress
().
equals
(
Global
.
YES
))
{
log
.
debug
(
"runDownloadTask.run"
,
"准备生成自定义查询压缩文件 "
+
condition
.
getTaskId
()
+
".zip"
);
String
zipFilePath
=
SAVE_FOLDER
+
"/"
+
condition
.
getTaskId
()
+
".zip"
;
...
...
@@ -504,13 +514,16 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
zos
.
closeEntry
();
zos
.
close
();
}
cloudFileUrl
=
FileUploadUtil
.
simpleUploadFileFromLocal
(
zipFile
,
task
.
getId
(),
BucketNameEnum
.
COMPRESS_60000
.
getName
());
}
else
{
cloudFileUrl
=
FileUploadUtil
.
simpleUploadFileFromLocal
(
new
File
(
originalFilePath
),
task
.
getId
(),
BucketNameEnum
.
REPORT_50000
.
getName
());
}
log
.
debug
(
"上传腾讯云"
,
"地址为:"
+
cloudFileUrl
);
task
.
setStatus
(
DownloadTaskStatus
.
COMPLISHED
);
task
.
setOverTime
(
new
java
.
util
.
Date
());
String
taskFileExt
=
task
.
getUseCompress
().
equals
(
Global
.
YES
)
?
".zip"
:
task
.
getFormat
().
equals
(
DownloadFileFormat
.
CSV
)
?
".csv"
:
".xlsx"
;
task
.
setFilePath
(
task
.
getId
()
+
taskFileExt
);
task
.
setFilePath
(
cloudFileUrl
);
DownloadTaskServiceImpl
.
getInstance
().
updateDownloadTask
(
task
);
}
catch
(
Exception
ex
)
{
...
...
@@ -611,4 +624,8 @@ public class FreeQueryServiceImpl implements IFreeQueryService {
}
public
static
void
main
(
String
[]
args
){
long
startTs
=
(
System
.
currentTimeMillis
()
/
60
*
1000
+
1
)
*
60
*
1000
-
System
.
currentTimeMillis
();
System
.
out
.
println
(
"startTs = "
+
startTs
);
}
}
gic-cloud-data-hook-service/src/main/resources/mapper/DhDecryptKeyMapper.xml
0 → 100644
View file @
9470ea1d
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper
namespace=
"com.gic.cloud.data.hook.service.dao.DhDecryptKeyMapper"
>
<resultMap
id=
"BaseResultMap"
type=
"com.gic.cloud.data.hook.api.dto.DhDecryptKey"
>
<id
column=
"id"
jdbcType=
"INTEGER"
property=
"id"
/>
<result
column=
"enterprise_id"
jdbcType=
"VARCHAR"
property=
"enterpriseId"
/>
<result
column=
"public_key"
jdbcType=
"VARCHAR"
property=
"publicKey"
/>
<result
column=
"private_key"
jdbcType=
"VARCHAR"
property=
"privateKey"
/>
<result
column=
"create_time"
jdbcType=
"TIMESTAMP"
property=
"createTime"
/>
<result
column=
"expire_time"
jdbcType=
"TIMESTAMP"
property=
"expireTime"
/>
<result
column=
"update_time"
jdbcType=
"TIMESTAMP"
property=
"updateTime"
/>
</resultMap>
<sql
id=
"Base_Column_List"
>
id, enterprise_id, public_key, private_key, create_time, expire_time, update_time
</sql>
<select
id=
"selectByPrimaryKey"
parameterType=
"java.lang.Integer"
resultMap=
"BaseResultMap"
>
select
<include
refid=
"Base_Column_List"
/>
from dh_decrypt_key
where id = #{id,jdbcType=INTEGER}
</select>
<delete
id=
"deleteByPrimaryKey"
parameterType=
"java.lang.Integer"
>
delete from dh_decrypt_key
where id = #{id,jdbcType=INTEGER}
</delete>
<insert
id=
"insert"
parameterType=
"com.gic.cloud.data.hook.api.dto.DhDecryptKey"
>
insert into dh_decrypt_key (id, enterprise_id, public_key,
private_key, create_time, expire_time,
update_time)
values (#{id,jdbcType=INTEGER}, #{enterpriseId,jdbcType=VARCHAR}, #{publicKey,jdbcType=VARCHAR},
#{privateKey,jdbcType=VARCHAR}, #{createTime,jdbcType=TIMESTAMP}, #{expireTime,jdbcType=TIMESTAMP},
#{updateTime,jdbcType=TIMESTAMP})
</insert>
<insert
id=
"insertSelective"
parameterType=
"com.gic.cloud.data.hook.api.dto.DhDecryptKey"
>
insert into dh_decrypt_key
<trim
prefix=
"("
suffix=
")"
suffixOverrides=
","
>
<if
test=
"id != null"
>
id,
</if>
<if
test=
"enterpriseId != null"
>
enterprise_id,
</if>
<if
test=
"publicKey != null"
>
public_key,
</if>
<if
test=
"privateKey != null"
>
private_key,
</if>
<if
test=
"createTime != null"
>
create_time,
</if>
<if
test=
"expireTime != null"
>
expire_time,
</if>
<if
test=
"updateTime != null"
>
update_time,
</if>
</trim>
<trim
prefix=
"values ("
suffix=
")"
suffixOverrides=
","
>
<if
test=
"id != null"
>
#{id,jdbcType=INTEGER},
</if>
<if
test=
"enterpriseId != null"
>
#{enterpriseId,jdbcType=VARCHAR},
</if>
<if
test=
"publicKey != null"
>
#{publicKey,jdbcType=VARCHAR},
</if>
<if
test=
"privateKey != null"
>
#{privateKey,jdbcType=VARCHAR},
</if>
<if
test=
"createTime != null"
>
#{createTime,jdbcType=TIMESTAMP},
</if>
<if
test=
"expireTime != null"
>
#{expireTime,jdbcType=TIMESTAMP},
</if>
<if
test=
"updateTime != null"
>
#{updateTime,jdbcType=TIMESTAMP},
</if>
</trim>
</insert>
<update
id=
"updateByPrimaryKeySelective"
parameterType=
"com.gic.cloud.data.hook.api.dto.DhDecryptKey"
>
update dh_decrypt_key
<set>
<if
test=
"enterpriseId != null"
>
enterprise_id = #{enterpriseId,jdbcType=VARCHAR},
</if>
<if
test=
"publicKey != null"
>
public_key = #{publicKey,jdbcType=VARCHAR},
</if>
<if
test=
"privateKey != null"
>
private_key = #{privateKey,jdbcType=VARCHAR},
</if>
<if
test=
"createTime != null"
>
create_time = #{createTime,jdbcType=TIMESTAMP},
</if>
<if
test=
"expireTime != null"
>
expire_time = #{expireTime,jdbcType=TIMESTAMP},
</if>
<if
test=
"updateTime != null"
>
update_time = #{updateTime,jdbcType=TIMESTAMP},
</if>
</set>
where id = #{id,jdbcType=INTEGER}
</update>
<update
id=
"updateByPrimaryKey"
parameterType=
"com.gic.cloud.data.hook.api.dto.DhDecryptKey"
>
update dh_decrypt_key
set enterprise_id = #{enterpriseId,jdbcType=VARCHAR},
public_key = #{publicKey,jdbcType=VARCHAR},
private_key = #{privateKey,jdbcType=VARCHAR},
create_time = #{createTime,jdbcType=TIMESTAMP},
expire_time = #{expireTime,jdbcType=TIMESTAMP},
update_time = #{updateTime,jdbcType=TIMESTAMP}
where id = #{id,jdbcType=INTEGER}
</update>
<select
id=
"selectByEnterpriseId"
resultMap=
"BaseResultMap"
>
select
<include
refid=
"Base_Column_List"
/>
from dh_decrypt_key
where enterprise_id=#{enterpriseId} and now()
<
expire_time
</select>
</mapper>
\ No newline at end of file
gic-cloud-data-hook-service/src/main/resources/mapper/FlatQueryTableDao.xml
View file @
9470ea1d
...
...
@@ -54,15 +54,16 @@
FROM
<include
refid=
"queryTables"
/>
<include
refid=
"queryJoins"
/>
where 1=1
<if
test=
"fuzzy != '' "
>
<bind
name=
"pattern"
value=
"'%' + fuzzy + '%'"
/>
WHERE
q.table_id LIKE #{pattern}
and
(
q.table_id LIKE #{pattern}
OR q.name LIKE #{pattern}
OR q.description LIKE #{pattern}
OR q.description LIKE #{pattern}
)
</if>
<if
test=
"authTables != null and authTables.size()>0"
>
where
q.table_id in
and
q.table_id in
<foreach
close=
")"
collection=
"authTables"
index=
"index"
item=
"item"
open=
"("
separator=
","
>
#{item}
</foreach>
...
...
gic-cloud-data-hook/src/main/java/com/gic/cloud/data/hook/web/FlatQueryController.java
View file @
9470ea1d
...
...
@@ -12,10 +12,7 @@ import com.gic.cloud.data.hook.api.entity.FlatQueryCondition;
import
com.gic.cloud.data.hook.api.entity.FlatQueryExecuteRequest
;
import
com.gic.cloud.data.hook.api.entity.GeneralResult
;
import
com.gic.cloud.data.hook.api.entity.Global
;
import
com.gic.cloud.data.hook.api.service.IFlatQueryResultService
;
import
com.gic.cloud.data.hook.api.service.IFlatQueryTableService
;
import
com.gic.cloud.data.hook.api.service.IMyFlatQueryService
;
import
com.gic.cloud.data.hook.api.service.SearchLogService
;
import
com.gic.cloud.data.hook.api.service.*
;
import
com.gic.enterprise.api.constant.StoreWidget
;
import
com.gic.enterprise.api.dto.EnterpriseSettingDTO
;
import
com.gic.enterprise.api.dto.StoreWidgetDTO
;
...
...
@@ -74,6 +71,8 @@ public class FlatQueryController {
private
RightService
rightService
;
@Autowired
private
SearchLogService
searchLogService
;
@Autowired
private
IFreeQueryService
freeQueryService
;
/** 查询表定义及下载量等信息
...
...
@@ -217,14 +216,14 @@ public class FlatQueryController {
if
(
dataPermission
!=
null
){
for
(
FlatQueryCondition
condition
:
queryConditions
){
if
(
dataPermission
==
1
){
if
(
FILTERS_PHONE_ONLY
.
contains
(
condition
.
getFieldName
())){
if
(
freeQueryService
.
listFilterPhoneField
()
.
contains
(
condition
.
getFieldName
())){
condition
.
setEnableEncrypt
(
true
);
}
else
{
condition
.
setEnableEncrypt
(
false
);
}
}
if
(
dataPermission
==
2
){
if
(
FILTERS_PHONE_AND_CARD
.
contains
(
condition
.
getFieldName
())){
if
(
freeQueryService
.
listFilterPhoneAndCardNoField
()
.
contains
(
condition
.
getFieldName
())){
condition
.
setEnableEncrypt
(
true
);
}
else
{
condition
.
setEnableEncrypt
(
false
);
...
...
gic-cloud-data-hook/src/main/java/com/gic/cloud/data/hook/web/FreeQueryController.java
View file @
9470ea1d
...
...
@@ -160,7 +160,8 @@ public class FreeQueryController {
dto
.
setStartTime
(
new
Date
());
Integer
dataPermission
=
this
.
enterpriseService
.
getEnterpriseSettingByEnterpriseId
(
SessionContextUtils
.
getLoginUserEnterpriseId
()).
getDataPermission
();
System
.
out
.
println
(
"!!!!!!!!:"
+
dataPermission
);
FreeQueryResult
freeQueryResult
=
this
.
freeQueryService
.
getFreeQueryResult
(
"SELECT tmpTable.* FROM ("
+
sql
+
") AS tmpTable LIMIT 1000"
,
enterpriseId
,
queryDataType
,
dataPermission
);
sql
=
parseSql
(
sql
);
FreeQueryResult
freeQueryResult
=
this
.
freeQueryService
.
getFreeQueryResult
(
sql
,
enterpriseId
,
queryDataType
,
dataPermission
);
dto
.
setEndTime
(
new
Date
());
this
.
searchLogService
.
saveLog
(
dto
);
return
freeQueryResult
;
...
...
@@ -182,6 +183,31 @@ public class FreeQueryController {
return
this
.
freeQueryService
.
buildFreeQueryDownloadTask
(
userId
,
name
,
amount
,
format
,
sql
,
enterpriseId
,
useCompress
,
dataType
,
applyId
,
desensiType
);
}
private
String
parseSql
(
String
sql
){
String
curLimit
=
"1000"
;
if
(
sql
.
indexOf
(
"information_schema.columns"
)
>
0
){
sql
=
"SELECT tmpTable.* FROM ("
+
sql
+
") AS tmpTable LIMIT 1000"
;
}
else
{
if
(
sql
.
indexOf
(
"limit"
)>
0
||
sql
.
indexOf
(
"LIMIT"
)>
0
){
int
start
=
0
;
if
(
sql
.
indexOf
(
"limit"
)>
0
){
start
=
sql
.
indexOf
(
"limit"
)
+
6
;
}
else
{
start
=
sql
.
indexOf
(
"LIMIT"
)
+
6
;
}
curLimit
=
sql
.
substring
(
start
);
if
(
curLimit
.
indexOf
(
","
)
>
0
){
curLimit
=
curLimit
.
substring
(
curLimit
.
indexOf
(
","
)+
1
);
}
if
(
Integer
.
valueOf
(
curLimit
)
>
1000
){
curLimit
=
1000
+
""
;
}
}
sql
=
sql
+
" limit "
+
curLimit
;
}
return
sql
;
}
/** 获取指定表明的所有字段
* @param tableName
* @param request
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment