Commit f911fa88 by guos

Member Tag 4.0 (会员标签4.0)

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.gic</groupId>
<artifactId>gic-spark-tag-4.0</artifactId>
<version>1.0-SNAPSHOT</version>
<parent>
<groupId>com.gic</groupId>
<artifactId>gic-pom-base</artifactId>
<version>3.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>com.gic</groupId>
<artifactId>gic-spark-commons</artifactId>
<version>4.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.mongodb.spark</groupId>
<artifactId>mongo-spark-connector_2.11</artifactId>
<version>2.0.0</version>
<exclusions>
<exclusion>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
</exclusion>
</exclusions>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>gic.elasticsearch</groupId>
<artifactId>gic-elasticsearch-shaded</artifactId>
<version>2.2.1</version>
<exclusions>
<exclusion>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
</exclusion>
<exclusion>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>delete-by-query</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.8.0</version>
</dependency>
</dependencies>
<build>
<finalName>gic-spark-tag</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<encoding>UTF-8</encoding>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<!-- Build an executable JAR -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.4</version>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<classpathPrefix>lib/</classpathPrefix>
<!--<mainClass>${main.class}</mainClass>-->
<classpathMavenRepositoryLayout>false</classpathMavenRepositoryLayout>
</manifest>
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>copy-dependencies</id>
<phase>compile</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/lib</outputDirectory>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>true</overWriteSnapshots>
<overWriteIfNewer>true</overWriteIfNewer>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<!--<transformers>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>${main.class}</mainClass>
</transformer>
</transformers>-->
<artifactSet>
<includes>
<include>com.gic:gic-spark-commons</include>
<include>org.elasticsearch:elasticsearch-hadoop</include>
<!--<include>jmock:*</include>-->
<!--<include>*:xml-apis</include>-->
<!--<include>org.apache.maven:lib:tests</include>-->
<!--<include>log4j:log4j:jar:</include>-->
</includes>
</artifactSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
package com.gic.spark;
import com.gic.sharding.sdk.ShardingConfigManager;
import com.gic.spark.application.SparkEnvManager;
import com.gic.spark.config.SparkConfigManager;
/**
* @description:
* @author: wangxk
* @date: 2020/4/15
*/
public class SparkTagProcess {
public static void main(String[] args){
if (args == null || args.length < 2) {
System.err.println("invalidate input params:");
System.err.println("1.is production: true/false");
System.err.println("2.extract data: true/false");
System.exit(2);
}
boolean isProd = Boolean.parseBoolean(args[0]);
boolean extractData = Boolean.parseBoolean(args[1]);
ShardingConfigManager.initDefualt();
SparkEnvManager.getInstance().init("member_tag_4.0");
SparkConfigManager.getInstance().init(isProd ? SparkConfigManager.EnvType.PRO : SparkConfigManager.EnvType.DEV, null);
}
}
package com.gic.spark.datasource;
import com.gic.sharding.sdk.ShardingConfigManager;
import com.gic.spark.datasource.entity.DataSourceEntity;
import com.gic.spark.datasource.entity.DataSourceMysql;
import com.gic.spark.datasource.entity.DataSourceSharding;
import com.gic.spark.datasource.mysql.MysqlDatasource;
import com.gic.spark.util.DingtalkMessageUtil;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @description:
* @author: wangxk
* @date: 2020/3/27
*/
public class DataSourceManager {
private Map<String, List<Integer>> datasourceEnterpriseMap = new HashMap();
private Map<String, DataSourceEntity> datasourceEntityMap = new HashMap();
private boolean isProduction = false;
private static DataSourceManager instance = null;
public static DataSourceManager getInstance() {
if (instance == null) {
instance = new DataSourceManager();
}
return instance;
}
private DataSourceManager() {
}
public void init(boolean isProduction) {
this.isProduction = isProduction;
}
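// Register a data source and the enterprise that uses it; in non-production runs,
// production MySQL/sharding configs are swapped for their dev counterparts.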
public void addSourceEntity(DataSourceEntity sourceEntity, Integer enterpriseId) {
if (!isProduction) {
switch (sourceEntity.getSourceType()) {
case MYSQL:
DataSourceMysql mysqlDataSource = (DataSourceMysql) sourceEntity;
if (mysqlDataSource.getDatasource() == MysqlDatasource.prodDatasource) {
mysqlDataSource.setDatasource(MysqlDatasource.devDatasource);
}
break;
case MYSQL_SHARDING:
DataSourceSharding sourceSharding = (DataSourceSharding) sourceEntity;
if (sourceSharding.getShardingConfig().get_id().equals("member_sharding_readonly")) {
sourceSharding.setShardingConfig(ShardingConfigManager.getInstance().getCurrentShardingConfig("member_sharding_test"));
} else if (sourceSharding.getShardingConfig().get_id().equals("market_sharding")) {
sourceSharding.setShardingConfig(ShardingConfigManager.getInstance().getCurrentShardingConfig("market_sharding_test"));
} else {
DingtalkMessageUtil.sendAlertMessage("标签处理,分库分表数据源未配置开发环境:" + sourceSharding.getShardingConfig().get_id());
}
break;
case MONGODB:
break;
}
}
String key = sourceEntity.getSourceKey();
if (!datasourceEnterpriseMap.containsKey(key)) {
datasourceEnterpriseMap.put(key, new ArrayList());
}
if (!datasourceEntityMap.containsKey(key)) {
datasourceEntityMap.put(key, sourceEntity);
}
List<Integer> enterpriseList = datasourceEnterpriseMap.get(key);
if (!enterpriseList.contains(enterpriseId))
enterpriseList.add(enterpriseId);
}
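// Extract every registered data source once into its partitioned Hive table,
// covering all enterprises that registered it.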
public void extractDataToDatabase() {
for (Map.Entry<String, DataSourceEntity> entityEntry : datasourceEntityMap.entrySet()) {
String key = entityEntry.getKey();
DataSourceEntity sourceEntity = entityEntry.getValue();
sourceEntity.extractDataToPartitionedHiveTable(datasourceEnterpriseMap.get(key));
}
}
}
package com.gic.spark.datasource;
import com.gic.spark.application.SparkEnvManager;
import com.google.common.base.Joiner;
import com.google.common.collect.Maps;
import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.ReadConfig;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.singletonList;
/**
* Created by paste on 2018/7/30 10:03
* <p>
* Singleton manager that reads MongoDB collections into Spark RDDs and Datasets.
*/
public class MongodbRddManager {
private static MongodbRddManager instance = null;
private Map<String, String> baseReadMap = new HashMap();
public static MongodbRddManager getInstance() {
if (instance == null) {
instance = new MongodbRddManager();
}
return instance;
}
private MongodbRddManager() {
baseReadMap.put("partitionerOptions.partitionSizeMB", "64");
baseReadMap.put("uri", "mongodb://mongouser:iP7LwQsezaWW@10.0.1.2:27017/gic-deploy.cluster_config?authSource=admin");
}
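// Load the given database/collection as a JavaMongoRDD, using the base read options
// plus the Spark Mongo connector input properties.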
public JavaMongoRDD<Document> getDocument(String database, String collection) {
SparkEnvManager.getInstance().addProperty("spark.mongodb.input.uri", "mongodb://10.0.1.2:27017/gic-deploy.cluster_config?readPreference=secondaryPreferred");
SparkEnvManager.getInstance().addProperty("spark.mongodb.input.partitionerOptions.partitionSizeMB", "64");
JavaSparkContext jsc=SparkEnvManager.getInstance().getJsc();
Map<String, String> readOverrides = Maps.newHashMap(baseReadMap);
readOverrides.put("database", database);
readOverrides.put("collection", collection);
ReadConfig readConfig = ReadConfig.create(jsc).withOptions(readOverrides);
return MongoSpark.load(jsc, readConfig);
}
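// Restrict the collection to documents whose columnName is one of the given values,
// via an aggregation $match/$in pipeline stage.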
public JavaMongoRDD<Document> getDocumentsByColumn(String database, String collection, String columnName, String... values) {
System.out.println("extracting database: " + database + " collection: " + collection);
JavaMongoRDD<Document> document = getDocument(database, collection);
return document.withPipeline(singletonList(Document.parse(String.format("{$match : { %s: {$in:['" + Joiner.on("','").join(values) + "']}}}", columnName))));
}
public Dataset<Row> getDataset(String database, String collection) {
return getDocument(database, collection).toDF();
}
}
package com.gic.spark.datasource.entity;
import com.gic.spark.application.SparkEnvManager;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import java.util.List;
/**
* Created by paste on 2018/7/27 16:36
* <p>
* Base class for a tag data source that can be extracted into a partitioned Hive table.
*/
public abstract class DataSourceEntity {
// public static final int SOURCE_TYPE_MYSQL = 0;
// public static final int SOURCE_TYPE_MYSQL_SHARDING = 1;
// public static final int SOURCE_TYPE_MONGODB = 2;
// public static final int SOURCE_TYPE_HIVE = 3;
protected DataSourceType sourceType;
public DataSourceType getSourceType() {
return sourceType;
}
public abstract String getSourceKey();
public abstract String getHiveTableName();
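// Default lookup: read this source's partitioned Hive mirror, filtered to a single enterprise.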
public Dataset<Row> getDatasetByEnterpriseId(Integer enterpriseId) {
return SparkEnvManager.getInstance().getSparkSession().sql(String.format("select * from %s where enterprise_id='%s'", getHiveTableName(), enterpriseId));
}
public abstract void extractDataToPartitionedHiveTable(List<Integer> enterpriseList);
}
package com.gic.spark.datasource.entity;
import com.gic.spark.application.SparkEnvManager;
import com.gic.spark.util.HivePartitionUtil;
import com.gic.spark.util.SparkHiveUtil;
import com.google.common.base.Joiner;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import java.util.List;
/**
* Created by paste on 2018/7/27 16:59
* <p>
* Hive-backed data source: copies the selected enterprises' rows into a tag.* partitioned table.
*/
public class DataSourceHive extends DataSourceEntity {
private String sourceTable;
private String schema;
private String table;
public DataSourceHive(String sourceTable) {
this.sourceType = DataSourceType.HIVE;
this.sourceTable = sourceTable;
String[] split = sourceTable.split("\\.");
if (split.length != 2) {
throw new RuntimeException("invalidate source table name: " + sourceTable);
}
schema = split[0];
table = split[1];
}
@Override
public String getSourceKey() {
return sourceType.getName() + "_" + sourceTable;
}
@Override
public String getHiveTableName() {
return "tag." + schema.replaceAll("\\.", "_") + "_" + table;
}
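// Copy the requested enterprises' rows from the source Hive table into the tag.* partition table,
// distributed by enterprise_id.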
@Override
public void extractDataToPartitionedHiveTable(List<Integer> enterpriseList) {
SparkSession sparkSession = SparkEnvManager.getInstance().getSparkSession();
Dataset<Row> sourceDataset = sparkSession
.sql(String.format("select * from %s where enterprise_id in ('%s')", sourceTable, Joiner.on("','").join(enterpriseList))) // quote the joined ids so the IN list is valid SQL
.repartition(new Column("enterprise_id"));
if (table.equals("new_t_fact_order_ddd_cloud")) {
sourceDataset = sparkSession
.sql(String.format("select * from %s where enterprise_id in ('%s') distribute by enterprise_id, is_member_order", sourceTable, Joiner.on("','").join(enterpriseList)));
}
SparkHiveUtil.createHivePartitionTable(sourceDataset, "enterprise_id", "tag", schema.replaceAll("\\.", "_") + "_" + table);
HivePartitionUtil.saveDatasetToPartitionTable(sparkSession, sourceDataset, getHiveTableName());
}
}
package com.gic.spark.datasource.entity;
import com.gic.spark.application.SparkEnvManager;
import com.gic.spark.datasource.MongodbRddManager;
import com.gic.spark.util.HivePartitionUtil;
import com.gic.spark.util.SparkHiveUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import java.util.List;
/**
* Created by paste on 2018/7/27 16:59
* <p>
* MongoDB-backed data source: extracts a collection into a tag.* partitioned Hive table.
*/
public class DataSourceMongodb extends DataSourceEntity {
private String database;
private String collection;
private String partitionColumn;
public DataSourceMongodb(String database, String collection, String partitionColumn) {
sourceType = DataSourceType.MONGODB;
this.database = database;
this.collection = collection;
this.partitionColumn = partitionColumn;
}
public String getDatabase() {
return database;
}
public void setDatabase(String database) {
this.database = database;
}
@Override
public String getSourceKey() {
return sourceType.getName() + "_" + database + "_" + collection;
}
@Override
public String getHiveTableName() {
return "tag." + database + "_" + collection;
}
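// Pull the matching documents from MongoDB and write them to the tag.* partition table,
// creating the table on first use.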
@Override
public void extractDataToPartitionedHiveTable(List<Integer> enterpriseList) {
SparkSession sparkSession = SparkEnvManager.getInstance().getSparkSession();
boolean isTableCreated = SparkHiveUtil.isTableExist("tag", database + "_" + collection, sparkSession);
// enterpriseList holds Integers; convert each id to a String, since List.toArray(String[]) would throw ArrayStoreException
String[] enterpriseIds = new String[enterpriseList.size()];
for (int i = 0; i < enterpriseList.size(); i++) {
enterpriseIds[i] = String.valueOf(enterpriseList.get(i));
}
Dataset<Row> dataset = MongodbRddManager.getInstance().getDocumentsByColumn(database, collection, partitionColumn, enterpriseIds).toDF();
if (!isTableCreated) {
SparkHiveUtil.createHivePartitionTable(dataset, partitionColumn, "tag", database + "_" + collection);
isTableCreated = true;
}
HivePartitionUtil.saveDatasetToPartitionTable(sparkSession, dataset, getHiveTableName());
}
}
package com.gic.spark.datasource.entity;
import com.gic.spark.application.SparkEnvManager;
import com.gic.spark.datasource.mysql.MysqlDatasource;
import com.gic.spark.util.HivePartitionUtil;
import com.gic.spark.util.SparkHiveUtil;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import java.util.List;
/**
* Created by paste on 2018/7/27 16:57
* <p>
* MySQL-backed data source: extracts a table into a tag.* partitioned Hive table.
*/
public class DataSourceMysql extends DataSourceEntity {
private MysqlDatasource datasource;
private String targetTable;
public DataSourceMysql(MysqlDatasource datasource, String targetTable) {
sourceType = DataSourceType.MYSQL;
this.datasource = datasource;
this.targetTable = targetTable;
}
public MysqlDatasource getDatasource() {
return datasource;
}
public void setDatasource(MysqlDatasource datasource) {
this.datasource = datasource;
}
@Override
public String getSourceKey() {
return sourceType.getName() + "_" + datasource.getSchema() + "_" + targetTable;
}
@Override
public String getHiveTableName() {
return "tag." + datasource.getSchema().replaceAll("\\.", "_") + "_" + targetTable;
}
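// Read the MySQL table for the requested enterprises and overwrite the corresponding Hive partitions.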
@Override
public void extractDataToPartitionedHiveTable(List<Integer> enterpriseList) {
SparkSession sparkSession = SparkEnvManager.getInstance().getSparkSession();
Dataset<Row> sourceDataset = datasource.buildRddManager().getDatasetByEnterpriseIds(targetTable, enterpriseList, null, null).repartition(new Column("enterprise_id"));
SparkHiveUtil.createHivePartitionTable(sourceDataset, "enterprise_id", "tag", datasource.getSchema().replaceAll("\\.", "_") + "_" + targetTable);
HivePartitionUtil.saveDatasetToPartitionTable(sparkSession, sourceDataset, getHiveTableName());
}
}
package com.gic.spark.datasource.entity;
import com.gic.sharding.sdk.ShardingConfigManager;
import com.gic.sharding.sdk.entity.ShardingConfig;
import com.gic.spark.application.SparkEnvManager;
import com.gic.spark.datasource.sharding.ShardingRddManager;
import com.gic.spark.util.HivePartitionUtil;
import com.gic.spark.util.SparkHiveUtil;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import java.util.List;
/**
* Created by paste on 2018/7/27 16:58
* <p>
* Sharded-MySQL data source: extracts a logical table across all shards into a tag.* partitioned Hive table.
*/
public class DataSourceSharding extends DataSourceEntity {
private ShardingConfig shardingConfig;
private String targetTable;
public DataSourceSharding(String shardingConfig, String targetTable) {
sourceType = DataSourceType.MYSQL_SHARDING;
this.shardingConfig = ShardingConfigManager.getInstance().getCurrentShardingConfig(shardingConfig);
this.targetTable = targetTable;
}
public ShardingConfig getShardingConfig() {
return shardingConfig;
}
public void setShardingConfig(ShardingConfig shardingConfig) {
this.shardingConfig = shardingConfig;
}
@Override
public String getSourceKey() {
return sourceType.getName() + "_" + shardingConfig.get_id() + "_" + targetTable;
}
@Override
public String getHiveTableName() {
return "tag." + shardingConfig.get_id() + "_" + targetTable;
}
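// Read the logical table across all shards for the requested enterprises and overwrite the corresponding Hive partitions.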
@Override
public void extractDataToPartitionedHiveTable(List<Integer> enterpriseList) {
SparkSession sparkSession = SparkEnvManager.getInstance().getSparkSession();
ShardingRddManager shardingRddManager = new ShardingRddManager(shardingConfig,sparkSession);
Dataset<Row> sourceDataset = shardingRddManager.getDatasetByEnterpriseIds(targetTable, enterpriseList, null, null).repartition(new Column("enterprise_id"));
SparkHiveUtil.createHivePartitionTable(sourceDataset, "enterprise_id", "tag", shardingConfig.get_id().replaceAll("\\.", "_") + "_" + targetTable);
HivePartitionUtil.saveDatasetToPartitionTable(sparkSession, sourceDataset, getHiveTableName());
}
}
package com.gic.spark.datasource.entity;
/**
* Created by paste on 2018/7/27 16:38
* <p>
* Supported tag data source types.
*/
public enum DataSourceType {
MYSQL("mysql"),
MYSQL_SHARDING("sharding"),
MONGODB("mongodb"),
HIVE("hive");
private String name;
DataSourceType(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
package com.gic.spark.entity;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2019/10/15
*/
public class DataConfig implements Serializable {
public String type;
public DataOption data;
public DataConfig() {
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public DataOption getData() {
return data;
}
public void setData(DataOption data) {
this.data = data;
}
}
package com.gic.spark.entity;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2019/10/15
*/
public class DataOption implements Serializable {
public String key;
public String compute;
public String value;
public String dealKey;
public DataOption() {
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getCompute() {
return compute;
}
public void setCompute(String compute) {
this.compute = compute;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public String getDealKey() {
return dealKey;
}
public void setDealKey(String dealKey) {
this.dealKey = dealKey;
}
}
package com.gic.spark.entity;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2019/10/17
*/
public class EsColumnInfoDTO implements Serializable {
private String columnType;
private String columnKey;
public EsColumnInfoDTO() {
}
public String getColumnType() {
return this.columnType;
}
public void setColumnType(String columnType) {
this.columnType = columnType;
}
public String getColumnKey() {
return this.columnKey;
}
public void setColumnKey(String columnKey) {
this.columnKey = columnKey;
}
}
package com.gic.spark.entity;
import java.io.Serializable;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2019/10/15
*/
public class TagValue implements Serializable {
List<DataConfig> val;
List<TimeOption> time;
public TagValue() {
}
public List<DataConfig> getVal() {
return val;
}
public void setVal(List<DataConfig> val) {
this.val = val;
}
public List<TimeOption> getTime() {
return time;
}
public void setTime(List<TimeOption> time) {
this.time = time;
}
}
package com.gic.spark.entity;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2019/10/15
*/
public class TimeOption implements Serializable {
public String type;
public String value;
public TimeOption() {
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark.entity.enumtype
* @Description: match scope: all records vs. at least one record
* @date September 11 2018, 17:39
*/
public enum AllOrExistType {
/**
* all
*/
ISALL("all"),
/**
* exists (at least one)
*/
ISEXIST("exist");
private String value;
AllOrExistType(String value){
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark.entity.enumtype
* @Description: whether a value belongs to a given set or not
* @date September 11 2018, 17:40
*/
public enum BelongType {
/**
* belongs to
*/
ISBELONG("belong"),
/**
* does not belong to
*/
NOTBELONG("notbelong");
private String value;
BelongType(String value){
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark
* @Description: coupon/card holding states
* @date August 21 2018, 9:49
*/
public enum CardType {
/**
* coupon already used
*/
USEDCARD,
/**
* coupon currently held (available)
*/
AVAILABLECARD,
// not yet claimed
UNHOLD,
// expired
EXPIRED;
private CardType(){}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark.entity.enumtype
* @Description: time granularity: day, month or year
* @date September 11 2018, 19:18
*/
public enum DayMonYearType {
/**
* day
*/
DAY("day"),
/**
* month
*/
MONTH("month"),
/**
* year
*/
YEAR("year");
private String value;
DayMonYearType(String value){
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark.entity.enumtype
* @Description: sort order: descending (most recent) or ascending (first)
* @date September 10 2018, 15:49
*/
public enum DescOrAscType {
/**
* descending: most recent
*/
DESC("desc"),
/**
* ascending: first/earliest
*/
ASC("asc");
private String value;
DescOrAscType(String value){
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark.entity.enumtype
* @Description: (fixed, historical, recent)
* @date September 11 2018, 19:14
*/
public enum FixHisRelaType {
FIRSTTIME("firsttime"),
LASTTIME("lasttime"),
/**
* recent: last N days
*/
RECENTTIME("recenttime"),
/**
* fixed time period
*/
FIXEDTIME("fixedtime"),
/**
* historical cumulative: all data over all time
*/
HISTORYTIME("historytime");
private String value;
FixHisRelaType(String value){
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark.entity.enumtype
* @Description: (fixed, relative)
* @date September 11 2018, 19:12
*/
public enum FixedOrRelativeType {
/**
* fixed time period
*/
FIXEDTIME("fixedtime"),
/**
* relative time period
*/
RELATIVETIME("relativetime");
private String value;
FixedOrRelativeType(String value){
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark
* @Description: (marketing channel type)
* @date August 20 2018, 20:46
*/
public enum MarketingType {
SMS("message"),
CARD("card"),
WXIMAGETEXT("teletext"),
WXTEXT("text"),
WXIMAGE("image"),
TELTASK("teltask"),
WXA("wxa");
// SMS("短信"),
// CARD("卡券"),
// WXIMAGETEXT("微信图文"),
// WXTEXT("微信文本"),
// WXIMAGE("微信图片"),
//
// TELTASK("话务"),
// WXA("小程序");
private String value;
MarketingType(String value){
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark.entity.enumtype
* @Description: member sign-in channel
* @date October 10 2018, 16:03
*/
public enum MemberSignType {
/**
* online sign-in
*/
LINESIGN(1),
/**
* in-store sign-in
*/
STORESIGN(2);
private int value;
MemberSignType(int value){
this.value = value;
}
public int getValue() {
return value;
}
public void setValue(int value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @description:
* @author: wangxk
* @date: 2019/5/10
*/
public enum NumberIntervalType {
BETWEEN("between"),
EQUAL("="),
LESSEQUAL("<="),
LARGEREQUAL(">="),
LESS("<"),
LARGER(">");
private String value;
NumberIntervalType(String value) {
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark.entity.enumtype
* @Description: relative time anchor: current period, recent period, or recent period including the present
* @date September 11 2018, 19:20
*/
public enum PreseBefOrLaterType {
/**
* current day / current month / current year
*/
PRESENT_TIME("present_time"),
// /**
// * before the current day / month / year, excluding the current day / month / year
// */
// PRESENTTIMEBEFORE("presenttimebefore"),
/**
* last n days / months / years, excluding the current day / month / year
*/
// recent
RECENT_TIME("recent_time"),
// recent, including the present period
RECENT_HAS_PRESENT_TIME("recent_has_present_time");
private String value;
PreseBefOrLaterType(String value){
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark
* @Description: (SMS unsubscribe status)
* @date August 20 2018, 21:04
*/
public enum SmsStatusType {
/**
* unsubscribed from SMS
*/
SMSUNSUBSCRIBE(1),
/**
* not unsubscribed from SMS
*/
SMSNOTUNSUBSCRIBE(-1);
private int value;
SmsStatusType(int value){
this.value = value;
}
public int getValue() {
return value;
}
public void setValue(int value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark
* @Description: WeChat marketing message type
* @date August 21 2018, 9:19
*/
public enum WxMarketingType {
/**
* WeChat image-text
*/
WXIMAGETEXT(0),
/**
* WeChat text
*/
WXTEXT(1),
// WeChat image
WXIMAGE(2);
private int value;
private WxMarketingType(int value){
this.value = value;
}
public int getValue() {
return value;
}
public void setValue(int value) {
this.value = value;
}
}
package com.gic.spark.entity.table;
public class TabMemberTagAll {
private long tag_Id;
private long enterprise_Id;
private long link_Id;
private String tag_Name;
private long tag_Type;
private String tag_Describe;
private long category_Id;
private long real_Time_Flag;
private long delete_Flag;
private String remark;
private String es_Field;
private String tag_Code;
private long tag_Group_Id;
private long contain_Flag;
private long create_User_Id;
private java.sql.Timestamp create_Time;
private java.sql.Timestamp update_Time;
public long getTag_Id() {
return tag_Id;
}
public void setTag_Id(long tag_Id) {
this.tag_Id = tag_Id;
}
public long getEnterprise_Id() {
return enterprise_Id;
}
public void setEnterprise_Id(long enterprise_Id) {
this.enterprise_Id = enterprise_Id;
}
public long getLink_Id() {
return link_Id;
}
public void setLink_Id(long link_Id) {
this.link_Id = link_Id;
}
public String getTag_Name() {
return tag_Name;
}
public void setTag_Name(String tag_Name) {
this.tag_Name = tag_Name;
}
public long getTag_Type() {
return tag_Type;
}
public void setTag_Type(long tag_Type) {
this.tag_Type = tag_Type;
}
public String getTag_Describe() {
return tag_Describe;
}
public void setTag_Describe(String tag_Describe) {
this.tag_Describe = tag_Describe;
}
public long getCategory_Id() {
return category_Id;
}
public void setCategory_Id(long category_Id) {
this.category_Id = category_Id;
}
public long getReal_Time_Flag() {
return real_Time_Flag;
}
public void setReal_Time_Flag(long real_Time_Flag) {
this.real_Time_Flag = real_Time_Flag;
}
public long getDelete_Flag() {
return delete_Flag;
}
public void setDelete_Flag(long delete_Flag) {
this.delete_Flag = delete_Flag;
}
public String getRemark() {
return remark;
}
public void setRemark(String remark) {
this.remark = remark;
}
public String getEs_Field() {
return es_Field;
}
public void setEs_Field(String es_Field) {
this.es_Field = es_Field;
}
public String getTag_Code() {
return tag_Code;
}
public void setTag_Code(String tag_Code) {
this.tag_Code = tag_Code;
}
public long getTag_Group_Id() {
return tag_Group_Id;
}
public void setTag_Group_Id(long tag_Group_Id) {
this.tag_Group_Id = tag_Group_Id;
}
public long getContain_Flag() {
return contain_Flag;
}
public void setContain_Flag(long contain_Flag) {
this.contain_Flag = contain_Flag;
}
public long getCreate_User_Id() {
return create_User_Id;
}
public void setCreate_User_Id(long create_User_Id) {
this.create_User_Id = create_User_Id;
}
public java.sql.Timestamp getCreate_Time() {
return create_Time;
}
public void setCreate_Time(java.sql.Timestamp create_Time) {
this.create_Time = create_Time;
}
public java.sql.Timestamp getUpdate_Time() {
return update_Time;
}
public void setUpdate_Time(java.sql.Timestamp update_Time) {
this.update_Time = update_Time;
}
}
package com.gic.spark.entity.table;
public class TabMemberTagCategory {
private long category_Id;
private String category_Name;
private long parent_Category_Id;
private long special_Tag_Flag;
private String special_Tag_Url;
private long support_Custom_Tag_Flag;
private long support_Custom_Tag_Found_Flag;
private String tab_Version;
private long delete_Flag;
private String remark;
private String create_User_Id;
private java.sql.Timestamp create_Time;
private java.sql.Timestamp update_Time;
private long sort;
public long getCategory_Id() {
return category_Id;
}
public void setCategory_Id(long category_Id) {
this.category_Id = category_Id;
}
public String getCategory_Name() {
return category_Name;
}
public void setCategory_Name(String category_Name) {
this.category_Name = category_Name;
}
public long getParent_Category_Id() {
return parent_Category_Id;
}
public void setParent_Category_Id(long parent_Category_Id) {
this.parent_Category_Id = parent_Category_Id;
}
public long getSpecial_Tag_Flag() {
return special_Tag_Flag;
}
public void setSpecial_Tag_Flag(long special_Tag_Flag) {
this.special_Tag_Flag = special_Tag_Flag;
}
public String getSpecial_Tag_Url() {
return special_Tag_Url;
}
public void setSpecial_Tag_Url(String special_Tag_Url) {
this.special_Tag_Url = special_Tag_Url;
}
public long getSupport_Custom_Tag_Flag() {
return support_Custom_Tag_Flag;
}
public void setSupport_Custom_Tag_Flag(long support_Custom_Tag_Flag) {
this.support_Custom_Tag_Flag = support_Custom_Tag_Flag;
}
public long getSupport_Custom_Tag_Found_Flag() {
return support_Custom_Tag_Found_Flag;
}
public void setSupport_Custom_Tag_Found_Flag(long support_Custom_Tag_Found_Flag) {
this.support_Custom_Tag_Found_Flag = support_Custom_Tag_Found_Flag;
}
public String getTab_Version() {
return tab_Version;
}
public void setTab_Version(String tab_Version) {
this.tab_Version = tab_Version;
}
public long getDelete_Flag() {
return delete_Flag;
}
public void setDelete_Flag(long delete_Flag) {
this.delete_Flag = delete_Flag;
}
public String getRemark() {
return remark;
}
public void setRemark(String remark) {
this.remark = remark;
}
public String getCreate_User_Id() {
return create_User_Id;
}
public void setCreate_User_Id(String create_User_Id) {
this.create_User_Id = create_User_Id;
}
public java.sql.Timestamp getCreate_Time() {
return create_Time;
}
public void setCreate_Time(java.sql.Timestamp create_Time) {
this.create_Time = create_Time;
}
public java.sql.Timestamp getUpdate_Time() {
return update_Time;
}
public void setUpdate_Time(java.sql.Timestamp update_Time) {
this.update_Time = update_Time;
}
public long getSort() {
return sort;
}
public void setSort(long sort) {
this.sort = sort;
}
}
package com.gic.spark.entity.table;
public class TabMemberTagCollect {
private long tag_Collect_Id;
private long tag_Id;
private long enterprise_Id;
private long create_User_Id;
private java.sql.Timestamp create_Time;
private java.sql.Timestamp update_Time;
public long getTag_Collect_Id() {
return tag_Collect_Id;
}
public void setTag_Collect_Id(long tag_Collect_Id) {
this.tag_Collect_Id = tag_Collect_Id;
}
public long getTag_Id() {
return tag_Id;
}
public void setTag_Id(long tag_Id) {
this.tag_Id = tag_Id;
}
public long getEnterprise_Id() {
return enterprise_Id;
}
public void setEnterprise_Id(long enterprise_Id) {
this.enterprise_Id = enterprise_Id;
}
public long getCreate_User_Id() {
return create_User_Id;
}
public void setCreate_User_Id(long create_User_Id) {
this.create_User_Id = create_User_Id;
}
public java.sql.Timestamp getCreate_Time() {
return create_Time;
}
public void setCreate_Time(java.sql.Timestamp create_Time) {
this.create_Time = create_Time;
}
public java.sql.Timestamp getUpdate_Time() {
return update_Time;
}
public void setUpdate_Time(java.sql.Timestamp update_Time) {
this.update_Time = update_Time;
}
}
package com.gic.spark.entity.table;
public class TabMemberTagDetail {
private long tag_Id;
private long enterprise_Id;
private String tag_Name;
private String tag_Describe;
private long category_Id;
private long real_Time_Flag;
private long delete_Flag;
private String remark;
private java.sql.Timestamp create_Time;
private java.sql.Timestamp update_Time;
private String es_Field;
private String tag_Code;
private long tag_Group_Id;
private long contain_Flag;
public long getTag_Id() {
return tag_Id;
}
public void setTag_Id(long tag_Id) {
this.tag_Id = tag_Id;
}
public long getEnterprise_Id() {
return enterprise_Id;
}
public void setEnterprise_Id(long enterprise_Id) {
this.enterprise_Id = enterprise_Id;
}
public String getTag_Name() {
return tag_Name;
}
public void setTag_Name(String tag_Name) {
this.tag_Name = tag_Name;
}
public String getTag_Describe() {
return tag_Describe;
}
public void setTag_Describe(String tag_Describe) {
this.tag_Describe = tag_Describe;
}
public long getCategory_Id() {
return category_Id;
}
public void setCategory_Id(long category_Id) {
this.category_Id = category_Id;
}
public long getReal_Time_Flag() {
return real_Time_Flag;
}
public void setReal_Time_Flag(long real_Time_Flag) {
this.real_Time_Flag = real_Time_Flag;
}
public long getDelete_Flag() {
return delete_Flag;
}
public void setDelete_Flag(long delete_Flag) {
this.delete_Flag = delete_Flag;
}
public String getRemark() {
return remark;
}
public void setRemark(String remark) {
this.remark = remark;
}
public java.sql.Timestamp getCreate_Time() {
return create_Time;
}
public void setCreate_Time(java.sql.Timestamp create_Time) {
this.create_Time = create_Time;
}
public java.sql.Timestamp getUpdate_Time() {
return update_Time;
}
public void setUpdate_Time(java.sql.Timestamp update_Time) {
this.update_Time = update_Time;
}
public String getEs_Field() {
return es_Field;
}
public void setEs_Field(String es_Field) {
this.es_Field = es_Field;
}
public String getTag_Code() {
return tag_Code;
}
public void setTag_Code(String tag_Code) {
this.tag_Code = tag_Code;
}
public long getTag_Group_Id() {
return tag_Group_Id;
}
public void setTag_Group_Id(long tag_Group_Id) {
this.tag_Group_Id = tag_Group_Id;
}
public long getContain_Flag() {
return contain_Flag;
}
public void setContain_Flag(long contain_Flag) {
this.contain_Flag = contain_Flag;
}
}
package com.gic.spark.entity.table;
public class TabMemberTagValue {
private long tag_Value_Id;
private long enterprise_Id;
private String tag_Value;
private long tag_Id;
private java.sql.Timestamp create_Time;
private java.sql.Timestamp update_Time;
public long getTag_Value_Id() {
return tag_Value_Id;
}
public void setTag_Value_Id(long tag_Value_Id) {
this.tag_Value_Id = tag_Value_Id;
}
public long getEnterprise_Id() {
return enterprise_Id;
}
public void setEnterprise_Id(long enterprise_Id) {
this.enterprise_Id = enterprise_Id;
}
public String getTag_Value() {
return tag_Value;
}
public void setTag_Value(String tag_Value) {
this.tag_Value = tag_Value;
}
public long getTag_Id() {
return tag_Id;
}
public void setTag_Id(long tag_Id) {
this.tag_Id = tag_Id;
}
public java.sql.Timestamp getCreate_Time() {
return create_Time;
}
public void setCreate_Time(java.sql.Timestamp create_Time) {
this.create_Time = create_Time;
}
public java.sql.Timestamp getUpdate_Time() {
return update_Time;
}
public void setUpdate_Time(java.sql.Timestamp update_Time) {
this.update_Time = update_Time;
}
}
package com.gic.spark.tag;
/**
* @description:
* @author: wangxk
* @date: 2020/4/15
*/
public class TagConstant {
public static final String TAG_COMPUTE_TYPE_BETWEEN = "between";//range filter
public static final String TAG_COMPUTE_TYPE_EQUAL = "=";
public static final String TAG_COMPUTE_TYPE_LESS_EQUAL = "<=";
public static final String TAG_COMPUTE_TYPE_LARGER_EQUAL = ">=";
public static final String TAG_COMPUTE_TYPE_LESS = "<";
public static final String TAG_COMPUTE_TYPE_LARGER = ">";
public static final String TAG_COMPUTE_TYPE_NOT_EQUAL = "!=";
public static final String TAG_COMPUTE_TYPE_LIKE = "like";//fuzzy match
public static final String TAG_COMPUTE_TYPE_IN = "in";//contains
public static final String TAG_COMPUTE_TYPE_NOT_IN = "notIn";//does not contain
public static final String TAG_COMPUTE_TYPE_ALL_IN = "allIn";//contains all
public static final String TAG_COMPUTE_TYPE_NOT_ALL_IN = "notAllIn";//does not contain all
public static final String TAG_COMPUTE_TYPE_AND = "and";//and
public static final String TAG_COMPUTE_TYPE_OR = "or";//or
public static final String TAG_COMPUTE_TYPE_IN_NOT_ALL = "inNA";//contains, without tokenization
public static final String TAG_COMPUTE_TYPE_TODAY = "today"; //today
public static final String TAG_COMPUTE_TYPE_LAST_DAY = "lastday";//last N days
public static final String TAG_COMPUTE_TYPE_AFTER_DAY = "afterday";//next N days
public static final String TAG_COMPUTE_TYPE_CURRENTMONTH = "currentMonth"; //current month
public static final String TAG_COMPUTE_TYPE_LAST_MONTH = "lastmonth";//last N months
public static final String TAG_COMPUTE_TYPE_AFTER_MONTH = "aftermonth";//next N months
public static final String TAG_COMPUTE_TYPE_CURRENTYEAR = "currentYear"; //current year
public static final String TAG_COMPUTE_TYPE_LAST_YEAR = "lastyear";//last N years
public static final String TAG_COMPUTE_TYPE_AFTER_YEAR = "afteryear";//next N years
public static final String TAG_COMPUTE_TYPE_LAST_DAY_HAS_TODAY= "lastdayHasToday"; //last N days, including today
public static final String TAG_COMPUTE_TYPE_AFTER_DAY_HAS_TODAY="afterdayHasToday"; //next N days, including today
public static final String TAG_COMPUTE_TYPE_LAST_MONTH_HAS_CURRENT_MONTH="lastmonthHasCurrentMonth"; //last N months, including the current month
public static final String TAG_COMPUTE_TYPE_AFTER_MONTH_HAS_CURRENT_MONTH="aftermonthHasCurrentMonth"; //next N months, including the current month
public static final String TAG_COMPUTE_TYPE_LAST_YEAR_HAS_CURRENT_YEAR="lastyearHasCurrentYear"; //last N years, including the current year
public static final String TAG_COMPUTE_TYPE_AFTER_YEAR_HAS_CURRENT_YEAR="afteryearHasCurrentYear"; //next N years, including the current year
public static final String TAG_TIME_TYPE_BETWEEN = "between";//fixed time range
public static final String TAG_TIME_TYPE_TODAY = "today"; //today
public static final String TAG_TIME_TYPE_LAST_DAY = "lastday";//last N days
public static final String TAG_TIME_TYPE_AFTER_DAY = "afterday";//next N days
public static final String TAG_TIME_TYPE_LAST_DYA_HAS_TODAT ="lastdayHasToday"; //last N days, including today
public static final String TAG_TIME_TYPE_CURRENTMONTH = "currentMonth"; //current month
public static final String TAG_TIME_TYPE_LAST_MONTH = "lastmonth";//last N months
public static final String TAG_TIME_TYPE_AFTER_MONTH = "aftermonth";//next N months
public static final String TAG_TIME_TYPE_LAST_MONTH_HASCURRENTMONTH ="lastmonthHasCurrentMonth"; //last N months, including the current month
public static final String TAG_TIME_TYPE_CURRENTYEAR = "currentYear"; //current year
public static final String TAG_TIME_TYPE_LAST_YEAR = "lastyear";//last N years
public static final String TAG_TIME_TYPE_AFTER_YEAR = "afteryear";//next N years
public static final String TAG_TIME_TYPE_LAST_YEAR_HASCURRENTYEAR = "CRlastyearHasCurrentYear"; //last N years, including the current year
public static final String TAG_TIME_TYPE_ALL_TIME = "alltime";//all time up to now
}
package com.gic.spark.tag;
/**
* @description:
* @author: wangxk
* @date: 2020/4/15
*/
public class TagProcess {
private boolean isProduction;
private static TagProcess instance;
public static TagProcess getInstance(){
if(null==instance){
instance=new TagProcess();
}
return instance;
}
private TagProcess(){}
public boolean isProduction() {
return isProduction;
}
public void init(boolean isProd){
this.isProduction=isProd;
if(isProduction){
}
}
public void process(){
}
}
package com.gic.spark.tag;
/**
* @description:
* @author: wangxk
* @date: 2020/4/15
*/
public class TagValueParser {
}
package com.gic.spark.util;
import scala.Tuple2;
import java.util.HashMap;
import java.util.Map;
/**
* @description:
* @author: wangxk
* @date: 2019/6/24
*/
public class CommonUtil {
public static Map<String,Integer> enterPerformanceMap=new HashMap();
public static Map<String, Tuple2<String,String>>userGroupMap=new HashMap();
}
package com.gic.spark.util;
import com.alibaba.druid.pool.DruidDataSource;
import java.lang.reflect.Field;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @description:
* @author: wangxk
* @date: 2018/7/27
*/
public class DaoHelper {
public static PreparedStatement getPreparedStatement(Connection connection,String sql) throws SQLException {
return connection.prepareStatement(sql);
}
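// Run the query and map each row onto a new instance of clazz by matching column labels
// to field names, case-insensitively.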
public static List query(Connection connection , String sql, Class clazz , Object... args){
List result = new ArrayList();
PreparedStatement statement=null;
try {
statement=getPreparedStatement(connection,sql);
if(null!=args){
for(int i=0;i<args.length;i++){
statement.setObject(i+1,args[i]);
}
}
ResultSet resultSet=statement.executeQuery();
ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
// the target business object's declared fields
Field[] fields = clazz.getDeclaredFields();
while (resultSet.next()) {
Object obj = clazz.newInstance();// construct the business object instance
int colLen=resultSetMetaData.getColumnCount();
for (int i = 1; i <=colLen ; i++) {
String columnLable = resultSetMetaData.getColumnLabel(i );
Object columnValue = resultSet.getObject(i);
for(int j=0;j<fields.length;j++){
Field f=fields[j];
if(f.getName().equalsIgnoreCase(columnLable)){// match column label to field name, ignoring case
boolean flag = f.isAccessible();
f.setAccessible(true);
f.set(obj, columnValue);
f.setAccessible(flag);
}
}
}
result.add(obj);
}
} catch (SQLException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InstantiationException e) {
e.printStackTrace();
} finally {
if(null!=statement){
try {
statement.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
if(null!=connection){
try {
connection.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
return result;
}
public static List<Map<String, Object>> select(Connection connection ,String sql, Object... args){
List<Map<String, Object>> result = new ArrayList();
PreparedStatement statement=null;
try {
statement=getPreparedStatement(connection,sql);
if(null!=args){
for(int i=0;i<args.length;i++){
statement.setObject(i+1,args[i]);
}
}
ResultSet resultSet=statement.executeQuery();
ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
while (resultSet.next()) {
Map<String, Object> values = new HashMap<String, Object>();
int colLen=resultSetMetaData.getColumnCount();
for (int i = 0; i <colLen ; i++) {
String columnLable = resultSetMetaData.getColumnLabel(i + 1);
Object columnValue = resultSet.getObject(i + 1);
values.put(columnLable, columnValue);
}
result.add(values);
}
} catch (SQLException e) {
e.printStackTrace();
}finally {
if(null!=statement){
try {
statement.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
if(null!=connection){
try {
connection.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
return result;
}
public static void closePreparedStatement(PreparedStatement preparedStatement){
if(null!=preparedStatement){
try {
preparedStatement.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
public static void closeConnection(Connection connection){
if(null!=connection){
try {
connection.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
public static void main(String args[]) throws SQLException {
// MysqlDatasource md=new MysqlDatasource();
// List<Map<String, Object>> rsList= DaoHelper.query(md.getConnection(),"select count(1) count from tab_gic_erp_interface_control " +
// "where enterprise_id=? and method_name='addMemberId' and enable_status=0 and open_status=0",new Object[]{"ff808081675af0d801675df0514701ea"});
// System.out.println(rsList.size());
DruidDataSource dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mysql://56cbb9f62fac6.sh.cdb.myqcloud.com:6634/enterprise_extract_byenterpriseid?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull&autoReconnect=true&rewriteBatchedStatements=true");
dataSource.setUsername("cdb_outerroot");
dataSource.setPassword("@09ui%sbc09");
// String sql=String.format("select * from tab_enterprisebyid_data_sync");
// System.out.println("query:sql==>"+sql);
// List<ColumnInfo> result=DaoHelper.query(dataSource.getConnection(),sql,new String[]{});
// System.out.println(JSONObject.toJSONString(result));
}
}
package com.gic.spark.util;
import com.alibaba.fastjson.JSON;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2019/10/18
*/
public class EntityUtil {
private static Logger logger = LogManager.getLogger(EntityUtil.class);
public EntityUtil() {
}
public static <T> T changeEntityNew(Class<T> target, Object baseTO) {
try {
T obj = target.newInstance();
if(baseTO == null) {
return null;
} else {
BeanUtils.copyProperties(baseTO, obj);
return obj;
}
} catch (Exception var3) {
return changeEntity(target, baseTO);
}
}
public static <T> List<T> changeEntityListNew(Class<T> target, List<?> list) {
if(list != null && list.size() != 0) {
List<T> resultList = new ArrayList();
Iterator var3 = list.iterator();
while(var3.hasNext()) {
Object obj = var3.next();
resultList.add(changeEntityNew(target, obj));
}
return resultList;
} else {
return null;
}
}
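// Reflection-based fallback copy: invoke each getter on baseTO and the matching setter
// on the target type when the parameter and return types agree.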
public static <T> T changeEntity(Class<T> target, Object baseTO) {
T obj = null;
if(baseTO == null) {
return null;
} else {
try {
obj = target.newInstance();
List<Method> targetmethods = Arrays.asList(target.getDeclaredMethods());
List<String> methodnames = new ArrayList();
Iterator var5 = targetmethods.iterator();
while(var5.hasNext()) {
Method method = (Method)var5.next();
methodnames.add(method.getName());
}
Method[] methods = baseTO.getClass().getDeclaredMethods();
for(int i = 0; i < methods.length; ++i) {
Method method = methods[i];
if(method.getName().startsWith("get")) {
String _tar = "set" + method.getName().substring(method.getName().indexOf("get") + 3);
if(methodnames.contains(_tar)) {
Method tarMethod = (Method)targetmethods.get(methodnames.indexOf(_tar));
if(tarMethod.getParameterTypes()[0].equals(method.getReturnType())) {
tarMethod.setAccessible(true);
tarMethod.invoke(obj, new Object[]{method.invoke(baseTO, new Object[0])});
}
}
}
}
} catch (Exception var10) {
logger.error("解析错误", var10);
}
return obj;
}
}
public static <T> T changeEntityByJSON(Class<T> target, Object baseTO) {
T obj = JSON.parseObject(JSON.toJSONString(baseTO), target);
return obj;
}
public static <T> List<T> changeEntityListByJSON(Class<T> target, Object baseTO) {
List<T> obj = JSON.parseArray(JSON.toJSONString(baseTO), target);
return obj;
}
}
package com.gic.spark.util;
import com.alibaba.fastjson.JSONObject;
import org.apache.http.HttpResponse;
import java.util.Map;
/**
* Created by paste on 2018/7/21 16:29
* <p>
* Resolves the member Elasticsearch index name (alias) for an enterprise via the screening HTTP API.
*/
public class EsRequestUtil {
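// Call the screening getEsNameAlias endpoint (prod or dev host) and return the index name on errcode 1.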
public static String getESIindexName(String enterprise_id, boolean isProd) {
String esIndexName = "";
String url;
if (isProd) {
url = "http://demogic.com/screening/getEsNameAlias.json?enterpriseId=" + enterprise_id + "&code=member";
} else {
url = "http://gicdev.demogic.com/screening/getEsNameAlias.json?enterpriseId=" + enterprise_id + "&code=member";
}
HttpResponse response = HttpClient.getHttpResponseByGet(url);
int responseCode = response.getStatusLine().getStatusCode();
if (responseCode == 200) {
JSONObject jsonObject = JSONObject.parseObject(HttpClient.getResponseString(response));
// System.out.println("jsonObject "+jsonObject.toJSONString());
Map result = (Map) jsonObject.get("response");
int code = (Integer) result.get("errcode");
if (code == 1) {
esIndexName = (String) result.get("name");
}
}
return esIndexName;
}
}
package com.gic.spark.util;
import com.gic.spark.datasource.mysql.MysqlDatasource;
import com.google.common.base.Joiner;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructField;
import java.util.ArrayList;
import java.util.List;
/**
* Created by paste on 2018/7/27 14:51
* <p>
* Writes a Dataset into an existing partitioned Hive table via INSERT OVERWRITE on its partitions.
*/
public class HivePartitionUtil {
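// Overwrite the target table's partitions with the dataset, reordering columns so the
// partition columns come last as Hive's INSERT ... PARTITION syntax requires.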
public static void saveDatasetToPartitionTable(SparkSession sparkSession, Dataset<Row> dataset, String targetTable) {
if (!SparkHiveUtil.isFieldMatch(dataset, sparkSession.table(targetTable))) {
dataset.printSchema();
throw new RuntimeException("save into partition table error: fields dismatch " + targetTable);
}
System.out.println("hiveMetastoreDatasource.Schema==>"+ MysqlDatasource.hiveMetastoreDatasource.getSchema());
System.out.println("hiveMetastoreDatasource.JdbcUrl==>"+ MysqlDatasource.hiveMetastoreDatasource.getJdbcUrl());
System.out.println("hiveMetastoreDatasource.User==>"+ MysqlDatasource.hiveMetastoreDatasource.getUser());
System.out.println("hiveMetastoreDatasource.Password==>"+ MysqlDatasource.hiveMetastoreDatasource.getPassword());
List<String> partitionColumns = SparkHiveUtil.getHiveTablePartitionColumns(targetTable);
if (partitionColumns.size() == 0) {
throw new RuntimeException("target table is not paritioned: " + targetTable);
}
List<String> columns = new ArrayList();
for (StructField structField : dataset.schema().fields()) {
if (!partitionColumns.contains(structField.name().toLowerCase()))
columns.add(structField.name());
}
columns.addAll(partitionColumns);
dataset.createOrReplaceTempView("tmp_table");
// generate sql
sparkSession.sql(String.format("INSERT overwrite table %s\n" +
"PARTITION (%s) SELECT %s from tmp_table", targetTable, Joiner.on(",").join(partitionColumns), Joiner.on(",").join(columns)));
System.out.println(targetTable+" table write finish !");
}
}
package com.gic.spark.util;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
/**
* Created by paste on 2018/7/26 09:23
* <p>
* Rewrites a Hive table as a dynamically partitioned table.
*/
public class HiveTableMoveUtil {
public static void moveTableAsDynamicPartitioned(SparkSession sparkSession, String source, String dest, String... partitions) {
Dataset<Row> dataset = sparkSession.sql("select * from " + source);
dataset.write().mode(SaveMode.Overwrite).partitionBy(partitions).saveAsTable(dest);
}
}
package com.gic.spark.util;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by paste on 2019/4/8 17:04
* <p>
* Resolves store id and goods SKU lists from widget/selector ids via the api-plug HTTP endpoints.
*/
public class IndexRequestUtil {
public static List<String> getStoreInfoById(String enterpriseId, String widgetId, boolean isProd) {
String url = "http://gicdev.demogic.com/api-plug/get-store-widget-store-ignorelogin?requestProject=gic-web&currentPage=1&pageSize=" + Integer.MAX_VALUE + "&key=" + widgetId + "&enterpriseId=" + enterpriseId;
if (isProd) {
url = "http://demogic.com/api-plug/get-store-widget-store-ignorelogin?requestProject=gic-web&currentPage=1&pageSize=" + Integer.MAX_VALUE + "&key=" + widgetId + "&enterpriseId=" + enterpriseId;
}
HttpResponse response = HttpClient.getHttpResponseByGet(url);
// System.out.println(HttpClient.getResponseString(response));
JSONObject jsonObject = JSONObject.parseObject(HttpClient.getResponseString(response));
List<String> result = new ArrayList();
try {
JSONArray jsonArray = jsonObject.getJSONObject("result").getJSONArray("result");
for (Object o : jsonArray) {
JSONObject storeInfo = (JSONObject) o;
result.add(storeInfo.getString("storeId"));
}
} catch (Exception e) {
DingtalkMessageUtil.sendAlertMessage("widgetId: " + widgetId + " enterpriseId: " + enterpriseId + " parsing store error " );
}
return result;
}
/*public static List<String> getGoodsInfoById(String enterpriseId ,String selectorId, boolean isProd) {
String url = "http://gicdev.demogic.com/api-plug/get-store-goods-sku-filter?enterpriseId="+enterpriseId+"&id=" + selectorId;
if (isProd) {
url = "http://demogic.com/api-plug/get-store-goods-sku-filter?requestProject=goods&enterpriseId="+enterpriseId+"&id=" + selectorId;
}
try {
org.apache.http.client.HttpClient httpClient = HttpClient.getHttpClient();
HttpResponse response = HttpClient.getHttpResponseByGet(url);
JSONObject jsonObject = JSONObject.parseObject(HttpClient.getResponseString(response));
JSONArray filterConditions = jsonObject.getJSONObject("result").getJSONArray("filterCondition");
System.out.println("filterConditions==>"+filterConditions.toJSONString());
String skuUrl="http://gicdev.demogic.com/api-plug/store-goods-sku-filter?enterpriseId="+enterpriseId;
if(isProd){
skuUrl="http://demogic.com/api-plug/store-goods-sku-filter?enterpriseId="+enterpriseId;
}
HttpPost httpPost = new HttpPost(skuUrl);
httpPost.addHeader("Content-Type", "application/json;charset=UTF-8");
JSONObject requestObject = new JSONObject();
// requestObject.put("requestProject","goods");
requestObject.put("currentPage", 1);
requestObject.put("pageSize", Integer.MAX_VALUE);
requestObject.put("queryList", filterConditions);
requestObject.put("source", 1);
StringEntity stringEntity = new StringEntity(requestObject.toString(), "UTF-8");
stringEntity.setContentEncoding("UTF-8");
httpPost.setEntity(stringEntity);
try {
HttpResponse response1 = httpClient.execute(httpPost);
JSONObject result = JSONObject.parseObject(HttpClient.getResponseString(response1));
JSONArray resultArray = result.getJSONObject("result").getJSONArray("result");
List<String> skuList = new ArrayList<>();
for (Object object : resultArray) {
JSONObject goodsInfo = (JSONObject) object;
// System.out.println(object);
skuList.add(goodsInfo.getString("skuCode"));
}
return skuList;
} catch (IOException e) {
e.printStackTrace();
}
}catch (Exception e){
e.printStackTrace();
DingtalkMessageUtil.sendAlertMessage("selectorId: " + selectorId + " parsing result error " );
}
return null;
}*/
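// Resolve the selector's filter conditions, then page through the SKU filter API and collect every skuCode.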
public static List<String> getGoodsInfoById(String enterpriseId ,String selectorId, boolean isProd) {
String url = "http://gicdev.demogic.com/api-plug/get-store-goods-sku-filter?enterpriseId="+enterpriseId+"&id=" + selectorId;
if (isProd) {
url = "http://demogic.com/api-plug/get-store-goods-sku-filter?requestProject=goods&enterpriseId="+enterpriseId+"&id=" + selectorId;
}
try {
HttpResponse response = HttpClient.getHttpResponseByGet(url);
JSONObject jsonObject = JSONObject.parseObject(HttpClient.getResponseString(response));
JSONArray filterConditions = jsonObject.getJSONObject("result").getJSONArray("filterCondition");
System.out.println("filterConditions==>"+filterConditions.toJSONString());
JSONObject resultSku=getSkuCode(enterpriseId , filterConditions, isProd ,1);
int total=resultSku.getJSONObject("result").getIntValue("total");
JSONArray resultArray = resultSku.getJSONObject("result").getJSONArray("result");
List<String> skuList = new ArrayList(total);
for (Object object : resultArray) {
JSONObject goodsInfo = (JSONObject) object;
skuList.add(goodsInfo.getString("skuCode"));
}
int totalPages= (total-1)/1000+1;
if(totalPages>1){
for(int i=2;i<=totalPages;i++){
JSONObject rlt=getSkuCode(enterpriseId , filterConditions, isProd ,i);
if(null!=rlt){
JSONObject rltJSONO= rlt.getJSONObject("result");
if(null!=rltJSONO){
JSONArray rltArray = rltJSONO.getJSONArray("result");
for (Object object : rltArray) {
JSONObject goodsInfo = (JSONObject) object;
skuList.add(goodsInfo.getString("skuCode"));
}
}
}
}
}
return skuList;
}catch (Exception e){
e.printStackTrace();
DingtalkMessageUtil.sendAlertMessage("selectorId: " + selectorId + " parsing result error " );
}
return null;
}
private static JSONObject getSkuCode( String enterpriseId ,JSONArray filterConditions, boolean isProd,int currentPage) throws IOException {
String skuUrl = "http://gicdev.demogic.com/api-plug/store-goods-sku-filter?enterpriseId=" + enterpriseId;
if (isProd) {
skuUrl = "http://demogic.com/api-plug/store-goods-sku-filter?enterpriseId=" + enterpriseId;
}
HttpPost httpPost = new HttpPost(skuUrl);
httpPost.addHeader("Content-Type", "application/json;charset=UTF-8");
JSONObject requestObject = new JSONObject();
requestObject.put("currentPage", currentPage);
requestObject.put("pageSize", Integer.MAX_VALUE);
requestObject.put("queryList", filterConditions);
requestObject.put("source", 1);
StringEntity stringEntity = new StringEntity(requestObject.toString(), "UTF-8");
stringEntity.setContentEncoding("UTF-8");
httpPost.setEntity(stringEntity);
HttpResponse response = HttpClient.getHttpClient().execute(httpPost);
JSONObject result = JSONObject.parseObject(HttpClient.getResponseString(response));
return result;
}
}
package com.gic.spark.util;
import java.util.Collections;
import java.util.List;
/**
* @author Luojs
* @version 1.0
* @Package com.gic.spark.util
* @Description: (compares whether two lists contain exactly the same elements)
* @date July 30 2018, 15:39
*/
public class ListEqualUtil {
public static boolean isListEqual(List<String> list1,List<String> list2){
if (list1 == list2){
return true;
}
if (null == list1 || null == list2){
return false;
}
if (list1.size() != list2.size()){
return false;
}
for (String l1 : list1) {
if (!list2.contains(l1)){
return false;
}
}
for (String l2 : list2) {
if (!list1.contains(l2)){
return false;
}
}
return true;
}
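// Note: sorts both input lists in place before comparing them for equality.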
public static boolean isListEquals(List<String> list1,List<String> list2){
Collections.sort(list1);
Collections.sort(list2);
if (list1.equals(list2)){
return true;
}else {
return false;
}
}
}