Commit 11446876 by guojuxing

Merge remote-tracking branch 'origin/developer' into developer

parents 0e62fc40 140ee8fd
......@@ -37,7 +37,9 @@ public enum DownloadReportStatusEnum {
*
* Generated
*/
GENERATED(5, "已生成")
GENERATED(5, "已生成"),
ERROR(6, "生成异常")
;
private Integer code;
private String msg;
......
package com.gic.enterprise.constant;
/**
* Excel file download extension types
* @ClassName: ExcelExtensionEnum
* @Description:
* @author guojuxing
* @date 2019/11/6 9:41 AM
*/
public enum ExcelExtensionEnum {
EXCEL_2007(1, ".xlsx"),
EXCEL_2003(2, ".xls"),
CSV(3, ".csv");
private int code;
private String extension;
private ExcelExtensionEnum(int code, String extension) {
this.code = code;
this.extension = extension;
}
/**
* Get the enum instance for the given code; returns CSV by default
* @Title: getByCode
* @Description:
* @author guojuxing
* @param code
* @return com.gic.enterprise.constant.ExcelExtensionEnum
*/
public static ExcelExtensionEnum getByCode(Integer code) {
if (code == null) {
return ExcelExtensionEnum.CSV;
}
for (ExcelExtensionEnum extensionEnum : values()) {
if (code.intValue() == extensionEnum.getCode()) {
return extensionEnum;
}
}
return ExcelExtensionEnum.CSV;
}
public static String getExtensionByCode(Integer code) {
if (code == null) {
return ".csv";
}
for (ExcelExtensionEnum extensionEnum : values()) {
if (code.intValue() == extensionEnum.getCode()) {
return extensionEnum.getExtension();
}
}
return ".csv";
}
public int getCode() {
return code;
}
public void setCode(int code) {
this.code = code;
}
public String getExtension() {
return extension;
}
public void setExtension(String extension) {
this.extension = extension;
}
}
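A quick usage sketch for the enum above (illustrative only, not part of this commit):
// Illustrative usage of ExcelExtensionEnum (not part of this commit):
ExcelExtensionEnum ext = ExcelExtensionEnum.getByCode(2);                   // EXCEL_2003
String csvSuffix = ExcelExtensionEnum.getExtensionByCode(null);             // ".csv" (fallback for null/unknown codes)
String downloadName = "report" + ExcelExtensionEnum.getExtensionByCode(1);  // "report.xlsx"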
......@@ -115,6 +115,11 @@ public class DownloadReportDTO implements Serializable {
private Integer downloadReportStatus;
/** Data source; defaults to "table", meaning the business side supplies the data; "hive" means the data is fetched from Hive **/
private String dataSource = "table";
/** SQL used to query Hive **/
private String hiveSql;
/**
* Auditor
*/
......@@ -327,6 +332,22 @@ public class DownloadReportDTO implements Serializable {
this.projectCode = projectCode;
}
public String getDataSource() {
return dataSource;
}
public void setDataSource(String dataSource) {
this.dataSource = dataSource;
}
public String getHiveSql() {
return hiveSql;
}
public void setHiveSql(String hiveSql) {
this.hiveSql = hiveSql;
}
@Override
public String toString() {
return "DownloadReportDTO{" +
......@@ -350,6 +371,8 @@ public class DownloadReportDTO implements Serializable {
", createTime=" + createTime +
", updateTime=" + updateTime +
", downloadReportStatus=" + downloadReportStatus +
", dataSource='" + dataSource + '\'' +
", hiveSql='" + hiveSql + '\'' +
", auditorId=" + auditorId +
", auditorName='" + auditorName + '\'' +
", auditorPhone='" + auditorPhone + '\'' +
......
package com.gic.enterprise.dto;
import com.gic.enterprise.constant.ExcelExtensionEnum;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
public class HiveDataDownloadDTO implements Serializable {
private Integer enterpriseId;
/** SQL used to query Hive **/
private String sql;
/** Applicant user id */
private Integer applyUserId;
/** Data source **/
private String dataContent;
/** Report type **/
private Integer dataType;
/** dataUrl can be obtained via request.getHeader("Referer") **/
private String dataUrl;
/** Download file name */
private String fileName;
/** File extension: 1 .xlsx, 2 .xls, 3 .csv **/
private Integer excelExtension;
/** Start time of the data to download **/
private String reportStartTime;
/** End time of the data to download **/
private String reportEndTime;
/** Fields to desensitize (mask) */
private List<String> desenField = new ArrayList<>();
public Integer getEnterpriseId() {
return enterpriseId;
}
public void setEnterpriseId(Integer enterpriseId) {
this.enterpriseId = enterpriseId;
}
public String getSql() {
return sql;
}
public void setSql(String sql) {
this.sql = sql;
}
public Integer getApplyUserId() {
return applyUserId;
}
public void setApplyUserId(Integer applyUserId) {
this.applyUserId = applyUserId;
}
public String getDataContent() {
return dataContent;
}
public void setDataContent(String dataContent) {
this.dataContent = dataContent;
}
public Integer getDataType() {
return dataType;
}
public void setDataType(Integer dataType) {
this.dataType = dataType;
}
public String getDataUrl() {
return dataUrl;
}
public void setDataUrl(String dataUrl) {
this.dataUrl = dataUrl;
}
public String getFileName() {
return fileName;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
public String getReportStartTime() {
return reportStartTime;
}
public void setReportStartTime(String reportStartTime) {
this.reportStartTime = reportStartTime;
}
public String getReportEndTime() {
return reportEndTime;
}
public void setReportEndTime(String reportEndTime) {
this.reportEndTime = reportEndTime;
}
public List<String> getDesenField() {
return desenField;
}
public void setDesenField(List<String> desenField) {
this.desenField = desenField;
}
public Integer getExcelExtension() {
return excelExtension;
}
public void setExcelExtension(Integer excelExtension) {
this.excelExtension = excelExtension;
}
}
......@@ -101,4 +101,6 @@ public interface DownloadReportApiService {
*/
ServiceResponse<String> saveDownloadLogReturnDownloadUrl(Integer downloadReportId, Integer loginUserId);
ServiceResponse<Void> receiveHeart(Integer reportId);
}
package com.gic.enterprise.service;
import com.gic.api.base.commons.ServiceResponse;
import com.gic.enterprise.dto.HiveDataDownloadDTO;
public interface HiveDataDownloadApiService {
ServiceResponse<Integer> createDownloadTask(HiveDataDownloadDTO hiveDataDownloadDTO);
}
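A minimal caller sketch (illustrative only, not part of this commit; assumes the service is obtained via Dubbo/Spring injection and the field values are invented examples):
// Illustrative only: build a HiveDataDownloadDTO and submit a Hive download task.
HiveDataDownloadDTO dto = new HiveDataDownloadDTO();
dto.setEnterpriseId(1001);                               // example enterprise id
dto.setApplyUserId(42);                                  // example applicant user id
dto.setSql("select member_id, phone from t_member");     // Hive query to export
dto.setFileName("member_export");
dto.setExcelExtension(3);                                // 3 = .csv, see ExcelExtensionEnum
dto.setDesenField(java.util.Arrays.asList("phone"));     // columns to mask
ServiceResponse<Integer> resp = hiveDataDownloadApiService.createDownloadTask(dto);
Integer downloadReportId = resp.getResult();             // id of the created download record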
......@@ -231,6 +231,26 @@
<artifactId>gic-dubbo-extension</artifactId>
<version>4.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId>
<version>3.3</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>3.1.2</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>3.10-FINAL</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>3.12</version>
</dependency>
</dependencies>
<build>
......
......@@ -3,11 +3,7 @@ package com.gic.enterprise.entity;
import java.util.Date;
/**
*
* @ClassName: TabDownloadReport
* @Description:
* @author taogs
* @date 2020/9/11 15:42
* tab_download_report
*/
public class TabDownloadReport {
/**
......@@ -125,6 +121,21 @@ public class TabDownloadReport {
*/
private String projectCode;
/**
* Identifier of the batch task
*/
private String batchToken;
/**
* Data source: "table" means the data is supplied by the business side from a table; "hive" means the data is fetched from Hive and an SQL statement must be provided
*/
private String dataSource;
/**
* SQL used to query Hive
*/
private String hiveSql;
public Integer getDownloadReportId() {
return downloadReportId;
}
......@@ -308,4 +319,28 @@ public class TabDownloadReport {
public void setProjectCode(String projectCode) {
this.projectCode = projectCode;
}
public String getBatchToken() {
return batchToken;
}
public void setBatchToken(String batchToken) {
this.batchToken = batchToken;
}
public String getDataSource() {
return dataSource;
}
public void setDataSource(String dataSource) {
this.dataSource = dataSource;
}
public String getHiveSql() {
return hiveSql;
}
public void setHiveSql(String hiveSql) {
this.hiveSql = hiveSql;
}
}
\ No newline at end of file
package com.gic.enterprise.pojo;
import java.util.ArrayList;
import java.util.List;
public class DownloadTaskPOJO {
/** Report id **/
private String reportId;
/** Table name **/
private String tableName;
/** Hive query statement **/
private String hiveSql;
/** Report type **/
private Integer dataType;
/** Whether the task can be downloaded **/
private boolean canDownload = false;
/** Fields to desensitize (mask) */
private List<String> desenField = new ArrayList<>();
/** File name **/
private String fileName;
/** File extension: 1 .xlsx, 2 .xls, 3 .csv **/
private Integer excelExtension;
public String getReportId() {
return reportId;
}
public void setReportId(String reportId) {
this.reportId = reportId;
}
public String getHiveSql() {
return hiveSql;
}
public void setHiveSql(String hiveSql) {
this.hiveSql = hiveSql;
}
public boolean isCanDownload() {
return canDownload;
}
public void setCanDownload(boolean canDownload) {
this.canDownload = canDownload;
}
public List<String> getDesenField() {
return desenField;
}
public void setDesenField(List<String> desenField) {
this.desenField = desenField;
}
public String getTableName() {
return tableName;
}
public void setTableName(String tableName) {
this.tableName = tableName;
}
public Integer getExcelExtension() {
return excelExtension;
}
public void setExcelExtension(Integer excelExtension) {
this.excelExtension = excelExtension;
}
public String getFileName() {
return fileName;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
public Integer getDataType() {
return dataType;
}
public void setDataType(Integer dataType) {
this.dataType = dataType;
}
}
package com.gic.enterprise.service;
import com.gic.enterprise.constant.DownloadReportStatusEnum;
import com.gic.enterprise.dto.DownloadReportDTO;
import com.gic.enterprise.entity.TabDownloadReport;
import com.gic.enterprise.qo.DownloadReportQO;
......@@ -74,4 +75,16 @@ public interface DownloadReportService {
* @throws
*/
void updateAudit(TabDownloadReport downloadReport);
/**
* updateDownloadStatus
* @Title: updateDownloadStatus
* @Description: Update the download status of a task
* @author taogs
* @param reportId
* @param downloadReportStatusEnum
* @return void
* @throws
*/
void updateDownloadStatus(String reportId, DownloadReportStatusEnum downloadReportStatusEnum);
}
......@@ -3,6 +3,7 @@ package com.gic.enterprise.service.impl;
import com.gic.commons.util.EntityUtil;
import com.gic.commons.util.GlobalInfo;
import com.gic.enterprise.constant.DataSecurityAuditEnum;
import com.gic.enterprise.constant.DownloadReportStatusEnum;
import com.gic.enterprise.dao.mapper.TabDownloadReportMapper;
import com.gic.enterprise.dto.DownloadReportDTO;
import com.gic.enterprise.entity.TabDownloadReport;
......@@ -102,6 +103,14 @@ public class DownloadReportServiceImpl implements DownloadReportService {
tabDownloadReportMapper.updateByPrimaryKeySelective(downloadReport);
}
@Override
public void updateDownloadStatus(String reportId, DownloadReportStatusEnum downloadReportStatusEnum) {
TabDownloadReport downloadReport = new TabDownloadReport();
downloadReport.setReportId(reportId);
downloadReport.setStatus(downloadReportStatusEnum.getCode());
tabDownloadReportMapper.updateByPrimaryKeySelective(downloadReport);
}
private String getReportId() {
String reportId = CreateRandomUtils.generateNumberStr(8);
TabDownloadReport downloadReport = new TabDownloadReport();
......
......@@ -19,14 +19,24 @@ import com.gic.enterprise.error.ErrorCode;
import com.gic.enterprise.qo.DownloadReportQO;
import com.gic.enterprise.response.EnterpriseServiceResponse;
import com.gic.enterprise.service.*;
import com.gic.redis.data.util.RedisUtil;
import com.gic.store.utils.valid.ValidUtil;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.redisson.api.RMap;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
*
......@@ -52,6 +62,35 @@ public class DownloadReportApiServiceImpl implements DownloadReportApiService {
@Autowired
private Config config;
private static final String reportKey = "enterprise:report";
ScheduledExecutorService checkReportHeart = new ScheduledThreadPoolExecutor(1,
new BasicThreadFactory.Builder().namingPattern("reportHeartTimer-%d").daemon(true).build());
@PostConstruct
public void init(){
checkReportHeart.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
RMap<Object, Object> map = RedisUtil.getRedisClient().getMap(reportKey);
if(MapUtils.isNotEmpty(map)){
Iterator<Object> iterator = map.keySet().iterator();
while (iterator.hasNext()){
Object next = iterator.next();
long putTime = (long) map.get(next);
if((System.currentTimeMillis() - putTime) > 10*60*1000){
// No heartbeat for more than ten minutes: if the report is still being generated, mark it as failed
TabDownloadReport report = downloadReportService.getById((Integer) next);
if(report != null && report.getStatus().equals(DownloadReportStatusEnum.BUILD.getCode())){
downloadReportService.updateDownloadStatus(next.toString(), DownloadReportStatusEnum.ERROR);
}
map.remove(next);
}
}
}
}
}, 3, 3, TimeUnit.SECONDS);
}
@Override
public ServiceResponse<Integer> saveDownloadReport(DownloadReportDTO downloadReportDTO) {
ServiceResponse paramResult = ValidUtil.allCheckValidate(downloadReportDTO, DownloadReportDTO.SaveValid.class);
......@@ -203,4 +242,11 @@ public class DownloadReportApiServiceImpl implements DownloadReportApiService {
downloadLogService.save(downloadReport, loginUserId, userDTO.getUserName());
return EnterpriseServiceResponse.success(downloadReport.getDownloadUrl());
}
@Override
public ServiceResponse<Void> receiveHeart(Integer reportId) {
RMap<Object, Object> map = RedisUtil.getRedisClient().getMap(reportKey);
map.put(reportId, System.currentTimeMillis());
return ServiceResponse.success();
}
}
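A sketch of how a long-running report producer might keep its task alive (illustrative only, not part of this commit); the watchdog above marks a report as ERROR when no heartbeat has arrived for ten minutes while the report is still in BUILD status. The reportId value, the injected downloadReportApiService reference, and the java.util.concurrent imports are assumed:
// Illustrative heartbeat loop for a report generator (not part of this commit).
ScheduledExecutorService heartbeat = Executors.newSingleThreadScheduledExecutor();
heartbeat.scheduleAtFixedRate(
() -> downloadReportApiService.receiveHeart(reportId), // refreshes the timestamp in the "enterprise:report" redis map
0, 1, TimeUnit.MINUTES);
// ... generate the report ...
heartbeat.shutdown(); // stop heartbeats once the report has been generated or has failed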
package com.gic.enterprise.service.outer.impl;
import com.gic.api.base.commons.ServiceResponse;
import com.gic.commons.util.EntityUtil;
import com.gic.enterprise.constant.DownloadReportDataTypeEnum;
import com.gic.enterprise.constant.DownloadReportStatusEnum;
import com.gic.enterprise.constant.ExcelExtensionEnum;
import com.gic.enterprise.dto.DownloadReportDTO;
import com.gic.enterprise.dto.HiveDataDownloadDTO;
import com.gic.enterprise.entity.TabDownloadReport;
import com.gic.enterprise.pojo.DownloadTaskPOJO;
import com.gic.enterprise.service.DownloadReportApiService;
import com.gic.enterprise.service.DownloadReportService;
import com.gic.enterprise.service.HiveDataDownloadApiService;
import com.gic.enterprise.utils.FileUtils;
import com.gic.enterprise.utils.HiveConnUtils;
import com.gic.thirdparty.BucketNameEnum;
import com.gic.thirdparty.FileOperateUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.io.File;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
@Service("hiveDataDownloadApiService")
public class HiveDataDownloadApiServiceImpl implements HiveDataDownloadApiService {
private static final Logger log = LogManager.getLogger(HiveDataDownloadApiServiceImpl.class);
ScheduledExecutorService downloadService = new ScheduledThreadPoolExecutor(1,
new BasicThreadFactory.Builder().namingPattern("downloadTimer-%d").daemon(true).build());
ScheduledExecutorService applyService = new ScheduledThreadPoolExecutor(1,
new BasicThreadFactory.Builder().namingPattern("applyTimer-%d").daemon(true).build());
private List<DownloadTaskPOJO> taskList = new ArrayList<>();
@Autowired
private DownloadReportApiService downloadReportApiService;
@Autowired
private DownloadReportService downloadReportService;
@PostConstruct
private void init(){
// Start the scheduled download task
runDownloadTask(3);
// Start the scheduled task that polls each task's audit status
runApplyTask(5);
}
/***
* 1 Create the download record; if approval is required, send the approval request and poll the task's audit status on a schedule
* 2 Add the task to the queue
* 3 Process the task queue on a schedule
* @param hiveDataDownloadDTO
* @return
*/
@Override
public ServiceResponse<Integer> createDownloadTask(HiveDataDownloadDTO hiveDataDownloadDTO) {
ServiceResponse<Integer> response = downloadReportApiService.saveDownloadReport(EntityUtil.changeEntityByJSON(DownloadReportDTO.class, hiveDataDownloadDTO));
if(!response.isSuccess()){
return response;
}
buildTask(response.getResult(), hiveDataDownloadDTO);
return response;
}
private void buildTask(Integer reportId, HiveDataDownloadDTO hiveDataDownloadDTO){
DownloadTaskPOJO downloadTaskPOJO = new DownloadTaskPOJO();
downloadTaskPOJO.setReportId(reportId.toString());
downloadTaskPOJO.setHiveSql(hiveDataDownloadDTO.getSql());
downloadTaskPOJO.setCanDownload(!DownloadReportDataTypeEnum.COMPLETE_DATA.getCode().equals(hiveDataDownloadDTO.getDataType()));
downloadTaskPOJO.setDesenField(hiveDataDownloadDTO.getDesenField());
downloadTaskPOJO.setExcelExtension(hiveDataDownloadDTO.getExcelExtension());
taskList.add(downloadTaskPOJO);
}
private void runDownloadTask(Integer interval) {
downloadService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
Iterator<DownloadTaskPOJO> iterator = taskList.iterator();
while (iterator.hasNext()){
DownloadTaskPOJO task = iterator.next();
if(task.isCanDownload()){
iterator.remove();
// Mark the task status as "generating"
downloadReportService.updateDownloadStatus(task.getReportId(), DownloadReportStatusEnum.BUILD);
// Obtain a Hive connection
Connection conn = HiveConnUtils.getConnection();
Statement stat = null;
try {
stat = conn.createStatement();
stat.execute("REFRESH TABLE " + task.getTableName()); // 强制刷新表结构 防止数据出错
ResultSet rs = stat.executeQuery(task.getHiveSql());
String url = FileUtils.createFile(rs, task.getDataType(), task.getDesenField(), task.getFileName(), task.getExcelExtension());
downloadReportService.updateDownloadUrl(Integer.valueOf(task.getReportId()), url);
} catch (Exception e) {
log.warn("下载数据出现异常", e);
// Mark the task status as error
downloadReportService.updateDownloadStatus(task.getReportId(), DownloadReportStatusEnum.ERROR);
// TODO: restart the thread
} finally {
try {
if(stat != null) {
stat.close();
}
if(conn != null) {
conn.close();
}
} catch (SQLException e) {
log.warn("stat 关闭异常", e);
}
}
}
}
}
}, interval*1000, interval*1000, TimeUnit.MILLISECONDS);
}
private void runApplyTask(Integer interval) {
applyService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
// TODO: handle the thread exiting because of an uncaught exception
Iterator<DownloadTaskPOJO> iterator = taskList.iterator();
while (iterator.hasNext()){
DownloadTaskPOJO task = iterator.next();
if(!task.isCanDownload()){
TabDownloadReport report = downloadReportService.getById(Integer.valueOf(task.getReportId()));
Integer auditResult = report.getAuditResult();
/** Still pending approval: skip for now */
if(DownloadReportStatusEnum.WAIT.getCode().equals(auditResult)){
continue;
}
/** Approved: mark the task as downloadable */
if(DownloadReportStatusEnum.PASS.getCode().equals(auditResult)){
task.setCanDownload(true);
}
/** Audit expired: remove the task */
if(DownloadReportStatusEnum.OVER_TIME.getCode().equals(auditResult)){
iterator.remove();
}
}
}
}
}, interval*1000, interval*1000, TimeUnit.MILLISECONDS);
}
}
package com.gic.enterprise.utils;
import com.gic.enterprise.constant.DownloadReportDataTypeEnum;
import com.opencsv.ResultSetHelper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.text.SimpleDateFormat;
import java.util.List;
public class CsvResultSetHelper implements ResultSetHelper {
private static final Logger log = LogManager.getLogger(CsvResultSetHelper.class);
/** Data filtering mode */
private Integer dataType = DownloadReportDataTypeEnum.DESENSITIZATION_DATA.getCode();
/** Column names (keywords) used for filtering/masking */
private List<String> filters = null;
public CsvResultSetHelper(Integer dataType, List<String> filters) {
this.dataType = dataType;
this.filters = filters;
}
@Override
public String[] getColumnNames(ResultSet resultSet) throws SQLException {
String[] result = new String[0];
int columnCount = resultSet.getMetaData().getColumnCount();
if (columnCount > 0) {
result = new String[columnCount];
for (int i=0; i<columnCount; i++) {
String rsColumnName = resultSet.getMetaData().getColumnLabel(i + 1);
result[i] = rsColumnName;
} // FOR OVER
} // IF OVER
return result;
}
@Override
public String[] getColumnValues(ResultSet resultSet) throws SQLException, IOException {
return this.getColumnValues(resultSet, false);
}
@Override
public String[] getColumnValues(ResultSet resultSet, boolean b) throws SQLException, IOException {
return this.getColumnValues(resultSet, false, "yyyy-MM-dd HH:mm:ss", "HH:mm:ss");
}
private static SimpleDateFormat datetimeFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd");
private static SimpleDateFormat timeFormatter = new SimpleDateFormat("HH:mm:ss");
@Override
public String[] getColumnValues(ResultSet resultSet, boolean b, String s, String s1) throws SQLException, IOException {
String[] result = new String[0];
int columnCount = resultSet.getMetaData().getColumnCount();
if (columnCount > 0) {
result = new String[columnCount];
for (int i=0; i<columnCount; i++) {
String columnName = resultSet.getMetaData().getColumnLabel(i + 1);
if(dataType == DownloadReportDataTypeEnum.DESENSITIZATION_DATA.getCode()){
// Desensitized data: mask the filtered columns
if(filters.contains(columnName)){
result[i] = "******";
} else {
getValue(resultSet, result, i, columnName);
}
} else {
// Complete data: no masking
getValue(resultSet, result, i, columnName);
}
}
}
return result;
}
private void getValue(ResultSet resultSet, String[] result, int i, String columnName){
try {
int columnType = resultSet.getMetaData().getColumnType(i + 1);
switch (columnType) {
case Types.TIMESTAMP:
result[i] = resultSet.getTimestamp(columnName) != null ? datetimeFormatter.format(resultSet.getTimestamp(columnName)) : "";
break;
case Types.DATE:
//result[i] = resultSet.getTimestamp(columnName) != null ? dateFormatter.format(resultSet.getTimestamp(columnName)) : "";
result[i] = resultSet.getDate(columnName) != null ? dateFormatter.format(resultSet.getDate(columnName)) : "";
break;
case Types.TIME:
result[i] = resultSet.getTimestamp(columnName) != null ? timeFormatter.format(resultSet.getTimestamp(columnName)) : "";
break;
default:
result[i] = String.valueOf(resultSet.getObject(columnName));
break;
}
}catch (Exception e){
log.warn(e);
}
}
}
package com.gic.enterprise.utils;
import com.gic.enterprise.constant.DownloadReportDataTypeEnum;
import com.gic.enterprise.constant.ExcelExtensionEnum;
import com.gic.enterprise.pojo.DownloadTaskPOJO;
import com.gic.thirdparty.BucketNameEnum;
import com.gic.thirdparty.FileOperateUtils;
import com.opencsv.CSVWriter;
import com.opencsv.ResultSetHelper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.xssf.streaming.SXSSFWorkbook;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.sql.ResultSet;
import java.sql.Types;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
public class FileUtils {
/** Download directory for csv / xls files */
public static final String SAVE_FOLDER = "/usr/local/data-hook-file";
private static final Logger log = LogManager.getLogger(FileUtils.class);
private static SimpleDateFormat datetimeFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd");
private static SimpleDateFormat timeFormatter = new SimpleDateFormat("HH:mm:ss");
public static String createFile(ResultSet rs, Integer dataType, List<String> desenField, String fileName, Integer fileExt){
String filePath = getFilePath(fileName, fileExt);
File tmpFile = createTmpFile(filePath);
if (fileExt.equals(ExcelExtensionEnum.CSV.getCode())) {
createCsvFile(rs, dataType, desenField, tmpFile, fileName, fileExt);
} else {
createXls(rs, dataType, desenField, tmpFile, fileName, fileExt);
}
String url = uploadFile(tmpFile, fileName, fileExt);
tmpFile.deleteOnExit();
return url;
}
private static void createXls(ResultSet rs, Integer dataType, List<String> desenField, File tempFile, String fileName, Integer fileExt){
log.info("开始生成文件 " + fileName + ".xlsx");
try{
SXSSFWorkbook wb = new SXSSFWorkbook(100); // keep 100 rows in memory; older rows are flushed to disk
Sheet sheet = wb.createSheet();
Row row = sheet.createRow(0);
Cell cell;
for (int j = 0; j < rs.getMetaData().getColumnCount(); ++j) { // build the header row
String colName = rs.getMetaData().getColumnLabel(j + 1);
cell = row.createCell(j);
cell.setCellValue(colName);
}
// write the data rows
int rowCount = 0;
while (rs.next()) {
rowCount++;
row = sheet.createRow(rowCount);
for (int j = 0; j < rs.getMetaData().getColumnCount(); ++j) {
String cName = rs.getMetaData().getColumnName(j+1);
if (dataType.equals(DownloadReportDataTypeEnum.DESENSITIZATION_DATA.getCode())) {
if(desenField.contains(cName)){
row.createCell(j).setCellValue("******");
}else {
getValue(rs, row, j);
}
} else {
getValue(rs, row, j);
}
}
}
FileOutputStream fileOut = new FileOutputStream(tempFile);
wb.write(fileOut);
fileOut.close();
wb.dispose(); // SXSSFWorkbook has no close method; dispose() cleans up its temporary files
log.info("临时文件已生成, " + fileName + ".xlsx");
}catch (Exception e){
log.warn(e);
}
}
private static void createCsvFile(ResultSet rs, Integer dataType, List<String> desenField, File tempFile, String fileName, Integer fileExt){
try {
OutputStreamWriter out = new OutputStreamWriter(new FileOutputStream(tempFile), Charset.forName("GBK"));
ResultSetHelper helper = new CsvResultSetHelper(dataType, desenField);
CSVWriter writer = new CSVWriter(out, ',');
writer.setResultService(helper);
writer.writeAll(rs, true);
writer.close();
out.close(); // remember to close the resource
log.info("临时文件已生成 " + fileName + ".csv");
} catch (Exception e) {
log.warn(e);
}
}
private static File createTmpFile(String path){
File tmp = new File(path);
if (tmp.exists()) { // delete any file left over from a previous run
tmp.delete();
}
return tmp;
}
private static String getFilePath(String fileName, Integer fileExt){
// append the file suffix resolved from the extension code (see ExcelExtensionEnum)
String originalFilePath = SAVE_FOLDER + File.separator + fileName + ExcelExtensionEnum.getExtensionByCode(fileExt);
return originalFilePath;
}
private static String uploadFile(File tmp, String fileName, Integer fileExt){
// upload the file
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
String fieldCode = "/" + fileName + "_" + sdf.format(new Date()) + ExcelExtensionEnum.getExtensionByCode(fileExt);
log.info("上传文件到腾讯云,文件名:" + fieldCode);
// store the file in Tencent Cloud object storage
String url = FileOperateUtils.simpleUploadFileFromLocal(tmp, fieldCode, BucketNameEnum.REPORT_50000.getName());
if (org.apache.commons.lang.StringUtils.isNotBlank(url)) {
url = "https://" + url;
}
log.info("上传文件到腾讯云,路径:" + url);
return url;
}
private static void getValue(ResultSet rs, Row row, int j){
try{
int cType = rs.getMetaData().getColumnType(j + 1);
switch (cType) {
case Types.TIMESTAMP:
row.createCell(j).setCellValue(rs.getTimestamp(j + 1) != null ? datetimeFormatter.format(rs.getTimestamp(j + 1)) : "");
break;
case Types.DATE:
row.createCell(j).setCellValue(rs.getDate(j + 1) != null ? dateFormatter.format(rs.getDate(j + 1)) : "");
break;
case Types.TIME:
row.createCell(j).setCellValue(rs.getTimestamp(j + 1) != null ? timeFormatter.format(rs.getTimestamp(j + 1)) : "");
break;
default:
row.createCell(j).setCellValue(rs.getString(j + 1));
break;
}
}catch (Exception e){
log.warn(e);
}
}
}
package com.gic.enterprise.utils;
import com.ctrip.framework.apollo.Config;
import com.ctrip.framework.apollo.ConfigService;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class HiveConnUtils {
private static final Logger log = LogManager.getLogger(HiveConnUtils.class);
private static String driverName = "org.apache.hive.jdbc.HiveDriver";
private static Map<String, Connection> conCache = new ConcurrentHashMap<>();
private static boolean start = false;
/** Hive becomes unstable when it runs for a long time and has to be restarted daily; pooled connections cannot be released and cause errors, so connections are obtained directly via JDBC, cached, and periodically checked for validity **/
public static Connection getConnection(){
Config config = ConfigService.getConfig("BUZ.COMMON.HIVE");
String url = config.getProperty("hive.download.url", "");
String user = config.getProperty("hive.username", "");
String password = config.getProperty("hive.password", "");
return getConnnection(url, user, password);
}
public static Connection getConnection(String hiveHost, String port){
String hiveUrl = getHiveUrl(hiveHost, port);
Config config = ConfigService.getConfig("BUZ.COMMON.HIVE");
String user = config.getProperty("hive.username", "");
String password = config.getProperty("hive.password", "");
return getConnnection(hiveUrl, user, password);
}
public static Connection getConnnection(String url, String user, String password){
if(!start){
// TODO: start a thread that checks connection health
}
try {
String key = url + ":" + user;
Connection connection = conCache.get(key);
if(connection != null){
if(connection.isValid(10)){
return connection;
}
}
Class.forName(driverName);
connection = DriverManager.getConnection(url, user, password);
conCache.put(key, connection);
return connection;
} catch (ClassNotFoundException | SQLException e) {
log.warn("获取hive连接异常:", e);
}
return null;
}
private static String getHiveUrl(String hiveHost, String port){
String url = "jdbc:hive2://"+ hiveHost +":"+ port +"/gicads?thriftSocketTimeout=10000;auth=noSasl";
return url;
}
}
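One possible shape for the connection-health check flagged in the TODO above (illustrative sketch only, not part of this commit; it would live inside HiveConnUtils since it touches the private conCache, and it assumes the usual java.util.concurrent imports):
// Illustrative sketch for the TODO: periodically evict dead connections from the cache.
private static void startConnectionChecker() {
ScheduledExecutorService checker = Executors.newSingleThreadScheduledExecutor();
checker.scheduleAtFixedRate(() -> conCache.entrySet().removeIf(entry -> {
try {
return !entry.getValue().isValid(10); // evict connections that stop responding within 10s
} catch (SQLException e) {
return true; // evict on error; the next getConnnection call will recreate it
}
}), 1, 1, TimeUnit.MINUTES);
start = true; // mark the checker as running so it is only started once
}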
......@@ -137,6 +137,7 @@
<dubbo:service interface="com.gic.enterprise.service.ProductMarketApiService" ref="productMarketApiService" timeout="6000" />
<dubbo:service interface="com.gic.enterprise.service.BlackListApiService" ref="blackListApiService" timeout="6000" />
<dubbo:service interface="com.gic.enterprise.service.CustomSettingApiService" ref="customSettingApiService" timeout="6000" />
<dubbo:service interface="com.gic.enterprise.service.HiveDataDownloadApiService" ref="hiveDataDownloadApiService" timeout="6000" />
<dubbo:service interface="com.gic.enterprise.service.CustomGuideTopApiService" ref="customGuideTopApiService" timeout="6000" />
......
......@@ -25,16 +25,26 @@
<result column="auditor_phone" jdbcType="VARCHAR" property="auditorPhone" />
<result column="audit_reason" jdbcType="VARCHAR" property="auditReason" />
<result column="project_code" jdbcType="VARCHAR" property="projectCode" />
<result column="batch_token" jdbcType="VARCHAR" property="batchToken" />
<result column="data_source" jdbcType="VARCHAR" property="dataSource" />
</resultMap>
<resultMap extends="BaseResultMap" id="ResultMapWithBLOBs" type="com.gic.enterprise.entity.TabDownloadReport">
<result column="hive_sql" jdbcType="LONGVARCHAR" property="hiveSql" />
</resultMap>
<sql id="Base_Column_List">
download_report_id, enterprise_id, report_id, apply_user_id, apply_user_name, report_start_time,
report_end_time, file_name, data_content, data_url, data_type, data_count, audit_result,
audit_expire_time, download_url, status, create_time, update_time, auditor_id, auditor_name,
auditor_phone, audit_reason, project_code
auditor_phone, audit_reason, project_code, batch_token, data_source
</sql>
<sql id="Blob_Column_List">
hive_sql
</sql>
<select id="selectByPrimaryKey" parameterType="java.lang.Integer" resultMap="BaseResultMap">
<select id="selectByPrimaryKey" parameterType="java.lang.Integer" resultMap="ResultMapWithBLOBs">
select
<include refid="Base_Column_List" />
,
<include refid="Blob_Column_List" />
from tab_download_report
where download_report_id = #{downloadReportId,jdbcType=INTEGER}
</select>
......@@ -47,7 +57,8 @@
audit_result, audit_expire_time, download_url,
status, create_time, update_time,
auditor_id, auditor_name, auditor_phone,
audit_reason, project_code)
audit_reason, project_code, batch_token,
data_source, hive_sql)
values (#{downloadReportId,jdbcType=INTEGER}, #{enterpriseId,jdbcType=INTEGER}, #{reportId,jdbcType=VARCHAR},
#{applyUserId,jdbcType=INTEGER}, #{applyUserName,jdbcType=VARCHAR}, #{reportStartTime,jdbcType=TIMESTAMP},
#{reportEndTime,jdbcType=TIMESTAMP}, #{fileName,jdbcType=VARCHAR}, #{dataContent,jdbcType=VARCHAR},
......@@ -55,12 +66,10 @@
#{auditResult,jdbcType=INTEGER}, #{auditExpireTime,jdbcType=TIMESTAMP}, #{downloadUrl,jdbcType=VARCHAR},
#{status,jdbcType=INTEGER}, #{createTime,jdbcType=TIMESTAMP}, #{updateTime,jdbcType=TIMESTAMP},
#{auditorId,jdbcType=INTEGER}, #{auditorName,jdbcType=VARCHAR}, #{auditorPhone,jdbcType=VARCHAR},
#{auditReason,jdbcType=VARCHAR}, #{projectCode,jdbcType=VARCHAR})
#{auditReason,jdbcType=VARCHAR}, #{projectCode,jdbcType=VARCHAR}, #{batchToken,jdbcType=VARCHAR},
#{dataSource,jdbcType=VARCHAR}, #{hiveSql,jdbcType=LONGVARCHAR})
</insert>
<insert id="insertSelective" parameterType="com.gic.enterprise.entity.TabDownloadReport">
<selectKey keyProperty="downloadReportId" order="AFTER" resultType="java.lang.Integer">
SELECT LAST_INSERT_ID()
</selectKey>
insert into tab_download_report
<trim prefix="(" suffix=")" suffixOverrides=",">
<if test="downloadReportId != null">
......@@ -132,6 +141,15 @@
<if test="projectCode != null">
project_code,
</if>
<if test="batchToken != null">
batch_token,
</if>
<if test="dataSource != null">
data_source,
</if>
<if test="hiveSql != null">
hive_sql,
</if>
</trim>
<trim prefix="values (" suffix=")" suffixOverrides=",">
<if test="downloadReportId != null">
......@@ -203,6 +221,15 @@
<if test="projectCode != null">
#{projectCode,jdbcType=VARCHAR},
</if>
<if test="batchToken != null">
#{batchToken,jdbcType=VARCHAR},
</if>
<if test="dataSource != null">
#{dataSource,jdbcType=VARCHAR},
</if>
<if test="hiveSql != null">
#{hiveSql,jdbcType=LONGVARCHAR},
</if>
</trim>
</insert>
<update id="updateByPrimaryKeySelective" parameterType="com.gic.enterprise.entity.TabDownloadReport">
......@@ -274,6 +301,15 @@
<if test="projectCode != null">
project_code = #{projectCode,jdbcType=VARCHAR},
</if>
<if test="batchToken != null">
batch_token = #{batchToken,jdbcType=VARCHAR},
</if>
<if test="dataSource != null">
data_source = #{dataSource,jdbcType=VARCHAR},
</if>
<if test="hiveSql != null">
hive_sql = #{hiveSql,jdbcType=LONGVARCHAR},
</if>
</set>
where download_report_id = #{downloadReportId,jdbcType=INTEGER}
</update>
......@@ -300,9 +336,13 @@
auditor_name = #{auditorName,jdbcType=VARCHAR},
auditor_phone = #{auditorPhone,jdbcType=VARCHAR},
audit_reason = #{auditReason,jdbcType=VARCHAR},
project_code = #{projectCode,jdbcType=VARCHAR}
project_code = #{projectCode,jdbcType=VARCHAR},
batch_token = #{batchToken,jdbcType=VARCHAR},
data_source = #{dataSource,jdbcType=VARCHAR},
hive_sql = #{hiveSql,jdbcType=LONGVARCHAR}
where download_report_id = #{downloadReportId,jdbcType=INTEGER}
</update>
<select id="getBySelective" resultMap="BaseResultMap">
select
<include refid="Base_Column_List" />
......
......@@ -4,6 +4,7 @@ import com.gic.api.base.commons.Page;
import com.gic.api.base.commons.ServiceResponse;
import com.gic.commons.webapi.reponse.RestResponse;
import com.gic.download.utils.log.LogUtils;
import com.gic.enterprise.ano.IgnoreLogin;
import com.gic.enterprise.dto.DownloadReportDTO;
import com.gic.enterprise.qo.DownloadReportQO;
import com.gic.enterprise.service.DownloadReportApiService;
......@@ -14,6 +15,7 @@ import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
/**
......@@ -60,4 +62,12 @@ public class DownloadReportController {
}
return ResultControllerUtils.commonResult(serviceResponse);
}
@RequestMapping("receive-heart")
@ResponseBody
@IgnoreLogin
public RestResponse receiveHeart(Integer reportId){
ServiceResponse<Void> response = downloadReportApiService.receiveHeart(reportId);
return RestResponse.success();
}
}