Commit 8d8a64d8 by zhangyannao

update

parent 2ab0c8ab
/gic-cloud-data-hook/target
/gic-cloud-data-hook-service/target
/gic-cloud-data-hook-api/target
/.idea
/gic-cloud-data-hook-service/gic-cloud-data-hook-service.iml
/gic-cloud.iml
/gic-cloud-data-hook/gic-cloud-data-hook.iml
/gic-cloud-data-hook-api/gic-cloud-data-hook-api.iml
......@@ -55,6 +55,27 @@
</dependency>
-->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.8.2</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.8.2</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.8.2</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-web</artifactId>
<version>2.8.2</version>
</dependency>
<dependency>
<groupId>org.spark-project.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>1.2.1.spark</version>
......
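
The four new artifacts pin a consistent Log4j 2.8.2 stack: log4j-api plus log4j-core form the backend, log4j-web ties it into the servlet container lifecycle, and log4j-slf4j-impl routes the project's existing slf4j calls (for example the LoggerFactory usage in TestHive below) to Log4j 2. A minimal smoke test of that routing, assuming only these dependencies and a log4j2.xml on the classpath; the class name is illustrative:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingSmokeTest {
        private static final Logger log = LoggerFactory.getLogger(LoggingSmokeTest.class);

        public static void main(String[] args) {
            // With log4j-slf4j-impl on the classpath, this slf4j call is
            // handled by log4j-core instead of slf4j's no-op fallback.
            log.info("slf4j is bound to Log4j 2");
        }
    }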
......@@ -47,6 +47,7 @@ public class HiveHelper implements ApplicationContextAware {
// Class.forName("org.apache.hive.jdbc.HiveDriver");
// conn = DriverManager.getConnection(url, "hadoop", "");
conn = source.getConnection();
return conn;
} catch (Exception ex) {
ex.printStackTrace();
......
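
The one added line here is return conn;, which hands the freshly obtained pooled connection back to the caller on the success path. A sketch of the resulting method shape; the DataSource field name and the trailing return are assumptions, and only the lines shown above are from the actual file:

    import java.sql.Connection;
    import javax.sql.DataSource;

    public class HiveConnectionSketch {
        private static DataSource source; // assumed field; wired by Spring in the real class

        public static Connection getHiveConnection() {
            Connection conn = null;
            try {
                conn = source.getConnection(); // pooled connection from the configured DataSource
                return conn;                   // the added return delivers it to the caller
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            return conn; // null on failure; callers like FlatQueryResultServiceImpl null-check
        }
    }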
......@@ -80,6 +80,7 @@ public class MysqlHelper implements ApplicationContextAware {
conn.close();
return source;
}
conn.close();
} catch (Exception e) {
e.printStackTrace();
log.debug("getFreeQuerySource", "获取编号为 " + enterpriseId + " 的商户自定义查询库连接失败");
......
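
The added conn.close() releases the probe connection on the path that falls through the if block; without it, that connection stayed checked out of the pool unless closed elsewhere. try-with-resources gives the same guarantee on every path, including exceptions. A minimal sketch under assumed names (source, enterpriseId), not the project's actual method:

    import java.sql.Connection;
    import javax.sql.DataSource;

    public class FreeQuerySourceProbe {
        // Hypothetical helper: return the pool only if a probe connection can be opened.
        // The probe connection is closed automatically on both success and failure.
        static DataSource validate(DataSource source, String enterpriseId) {
            try (Connection probe = source.getConnection()) {
                return source;
            } catch (Exception e) {
                System.err.println("Custom query source unavailable for merchant " + enterpriseId);
                return null;
            }
        }
    }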
......@@ -16,22 +16,27 @@ public class TestHive {
private static org.slf4j.Logger logger = LoggerFactory.getLogger(IFlatQueryTableService.class);
public static void main(String[] args) {
String url = "jdbc:hive2://115.159.205.44:10015/data_test";
// String url = "jdbc:hive2://115.159.205.44:10015/data_test";
// try {
// System.out.println("准备连接");
// Class.forName("org.apache.hive.jdbc.HiveDriver");
// Connection conn = DriverManager.getConnection(url, "hadoop", "");
// System.out.println("连接成功");
// String sql = "show tables";
// System.out.println("SQL: "+sql);
// Statement stmt = conn.createStatement();
// ResultSet rs = stmt.executeQuery(sql);
// while(rs.next()){
// System.out.println(rs.getString(1));
// }
//
// System.out.println("处理结束");
// } catch (Exception e) {
// e.printStackTrace();
// }
try {
Class.forName("org.apache.logging.log4j.core.LoggerContext");
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
......
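
After this change, main no longer opens a Hive connection at all; it only asks the classloader whether log4j-core is present. The same presence check can be wrapped to report a boolean instead of throwing; the class and method names below are illustrative:

    public class ClasspathCheck {
        // Returns true if the named class can be loaded, without propagating the exception.
        static boolean present(String className) {
            try {
                Class.forName(className);
                return true;
            } catch (ClassNotFoundException e) {
                return false;
            }
        }

        public static void main(String[] args) {
            System.out.println("log4j-core present: "
                    + present("org.apache.logging.log4j.core.LoggerContext"));
        }
    }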
......@@ -464,10 +464,12 @@ public class FlatQueryResultServiceImpl implements IFlatQueryResultService {
Connection conn = HiveHelper.getHiveConnection();
if (conn != null) {
try {
Statement stat = conn.createStatement();
stat.setQueryTimeout(60); // setQueryTimeout takes seconds; 60 * 1000 would request ~16.7 hours
stat.execute("REFRESH TABLE " + condition.getTableId()); // force a refresh of the table schema
ResultSet rs = stat.executeQuery(fullQuery);
// generate the download metafile in the specified format
String originalFilePath = "";
if (task.getFormat().equals(DownloadFileFormat.CSV)) { // if CSV format was requested
......
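
Note the timeout unit: java.sql.Statement.setQueryTimeout takes its value in seconds per the JDBC API, so a one-minute cap is 60, not 60 * 1000. A compact sketch of the refresh-then-query pattern with the statement and result set closed deterministically; the method shape and parameter names are assumed, not the project's code:

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class TimedQuery {
        // Run a table refresh and a bounded query; resources close on every path.
        static void run(Connection conn, String tableId, String fullQuery) throws Exception {
            try (Statement stat = conn.createStatement()) {
                stat.setQueryTimeout(60); // seconds, i.e. a one-minute cap
                stat.execute("REFRESH TABLE " + tableId);
                try (ResultSet rs = stat.executeQuery(fullQuery)) {
                    while (rs.next()) {
                        // consume rows here
                    }
                }
            }
        }
    }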
......@@ -15,20 +15,18 @@
<!--<import resource="classpath*:applicationContext-init.xml"/>-->
<import resource="classpath*:data-hook-service-druid.xml"/>
<import resource="classpath:data-hook-init.xml"/>
<import resource="classpath:data-hook-dubbo-settings.xml"/>
<import resource="classpath:data-hook-service-dubbo-config.xml"/>
<import resource="classpath:data-hook-flat-query-source.xml"/>
<import resource="classpath:data-hook-free-query-source.xml"/>
<!--
<import resource="classpath*:data-hook-service-druid-prod.xml"/>
<import resource="classpath:data-hook-init.xml"/>
<import resource="file:config/data-hook-dubbo-settings.xml"/>
<import resource="classpath:data-hook-service-dubbo-config.xml"/>
<import resource="file:config/data-hook-flat-query-source.xml"/>
<import resource="file:config/data-hook-free-query-source.xml"/>
-->
<!--<import resource="classpath:cloud-web-service-task.xml" />-->
<!--<import resource="classpath*:data-hook-service-druid.xml"/>-->
<!--<import resource="classpath:data-hook-init.xml"/>-->
<!--<import resource="classpath:data-hook-dubbo-settings.xml"/>-->
<!--<import resource="classpath:data-hook-service-dubbo-config.xml"/>-->
<!--<import resource="classpath:data-hook-flat-query-source.xml"/>-->
<!--<import resource="classpath:data-hook-free-query-source.xml"/>-->
<import resource="classpath*:data-hook-service-druid-prod.xml"/>
<import resource="classpath:data-hook-init.xml"/>
<import resource="file:config/data-hook-dubbo-settings.xml"/>
<import resource="classpath:data-hook-service-dubbo-config.xml"/>
<import resource="file:config/data-hook-flat-query-source.xml"/>
<import resource="file:config/data-hook-free-query-source.xml"/>
<!--<import resource="classpath:cloud-web-service-task.xml" />-->
</beans>
\ No newline at end of file
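
The net effect of this hunk is a switch from the dev wiring to the prod wiring: the Druid and flat/free query-source definitions are now loaded from a config/ directory on disk via file: URLs, while the remaining imports stay on the classpath (classpath*: aggregates matches from all classpath roots; classpath: takes a single resource). A sketch of how Spring resolves those prefixes when a context is bootstrapped programmatically; the choice of context class is illustrative:

    import org.springframework.context.support.FileSystemXmlApplicationContext;

    public class ContextPrefixSketch {
        public static void main(String[] args) {
            // "classpath:" resolves against the classpath; "file:" resolves against
            // the working directory, so config/ must sit next to the launch script.
            FileSystemXmlApplicationContext ctx = new FileSystemXmlApplicationContext(
                    "classpath:data-hook-init.xml",
                    "file:config/data-hook-dubbo-settings.xml");
            System.out.println("bean definitions: " + ctx.getBeanDefinitionCount());
            ctx.close();
        }
    }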
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="ExternalSystem" externalSystem="Maven" />
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_5">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
\ No newline at end of file