Commit 782fe27f by guos

Member tag 4.0

parent 1fddb90d
package com.gic.spark.entity;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2019/10/15
*/
public class DataConfig implements Serializable {
public String type;
public DataOption data;
public DataConfig() {
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public DataOption getData() {
return data;
}
public void setData(DataOption data) {
this.data = data;
}
}
package com.gic.spark.entity;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2019/10/15
*/
public class DataOption implements Serializable {
public String key;
public String compute;
public String value;
public String dealKey;
public DataOption() {
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getCompute() {
return compute;
}
public void setCompute(String compute) {
this.compute = compute;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public String getDealKey() {
return dealKey;
}
public void setDealKey(String dealKey) {
this.dealKey = dealKey;
}
}
package com.gic.spark.entity;
import java.io.Serializable;
import java.util.List;
/**
* @description:
* @author: wangxk
* @date: 2019/10/15
*/
public class TagValue implements Serializable {
List<DataConfig> val;
List<TimeOption> time;
public TagValue() {
}
public List<DataConfig> getVal() {
return val;
}
public void setVal(List<DataConfig> val) {
this.val = val;
}
public List<TimeOption> getTime() {
return time;
}
public void setTime(List<TimeOption> time) {
this.time = time;
}
}
package com.gic.spark.entity;
import java.io.Serializable;
/**
* @description:
* @author: wangxk
* @date: 2019/10/15
*/
public class TimeOption implements Serializable {
public String type;
public String value;
public TimeOption() {
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
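These four DTOs read like the object model for a serialized tag-filter payload. A minimal deserialization sketch, assuming the payload is JSON of this shape and fastjson is on the classpath (both assumptions; the commit shows neither the producer nor the JSON library):

import com.alibaba.fastjson.JSON;
import com.gic.spark.entity.TagValue;

public class TagValueJsonDemo {
    public static void main(String[] args) {
        // Hypothetical payload; the field names mirror the DTO fields above.
        String json = "{\"val\":[{\"type\":\"number\",\"data\":{\"key\":\"number.gte\",\"compute\":\"gte\",\"value\":\"100\"}}],"
                + "\"time\":[{\"type\":\"lately\",\"value\":\"30\"}]}";
        TagValue tagValue = JSON.parseObject(json, TagValue.class);
        System.out.println(tagValue.getVal().get(0).getData().getKey()); // number.gte
        System.out.println(tagValue.getTime().get(0).getValue());        // 30
    }
}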
@@ -17,4 +17,12 @@ public enum DomainType {
DomainType(String value) {
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.enumtype;
/**
* @description:
* @author: wangxk
* @date: 2020/4/26
*/
public enum NumberType {
gt(">"),
gte(">="),
lt("<"),
lte("<="),
eq("="),
between("between");
private String value;
NumberType(String value) {
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
@@ -7,4 +7,30 @@ package com.gic.spark.entity.enumtype;
*/
public enum TimeRangeType {
/**
* After a given point in time
*/
LATER("later"),
/**
* Within the most recent period
*/
LATELY("lately"),
/**
* Fixed date range
*/
FIXATION("fixation");
private String value;
TimeRangeType(String value) {
this.value = value;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
package com.gic.spark.entity.request;
import com.gic.spark.entity.enumtype.DomainType;
import java.io.Serializable;
/**
@@ -9,4 +11,13 @@ import java.io.Serializable;
*/
public abstract class FilterRequest implements Serializable {
protected DomainType domainType;
public DomainType getDomainType() {
return domainType;
}
public void setDomainType(DomainType domainType) {
this.domainType = domainType;
}
}
package com.gic.spark.entity.request;
import com.gic.spark.entity.enumtype.NumberType;
import com.gic.spark.entity.enumtype.TimeRangeType;
import java.sql.Timestamp;
/**
* @description:
* @author: wangxk
* @date: 2020/4/26
*/
public class IntegralRequest extends FilterRequest {
/**
* Lower bound of the numeric range
*/
private Double beginNum;
/**
* Upper bound of the numeric range
*/
private Double endNum;
/**
* Value for an equality comparison
*/
private Double equalNum;
private NumberType numberType;
/**
* Start of the time window
*/
private Timestamp beginTime;
/**
* End of the time window
*/
private Timestamp endTime;
/**
* Number of most recent days
*/
private int timeNum;
private TimeRangeType timeRangeType;
public Double getBeginNum() {
return beginNum;
}
public void setBeginNum(Double beginNum) {
this.beginNum = beginNum;
}
public Double getEndNum() {
return endNum;
}
public void setEndNum(Double endNum) {
this.endNum = endNum;
}
public Double getEqualNum() {
return equalNum;
}
public void setEqualNum(Double equalNum) {
this.equalNum = equalNum;
}
public NumberType getNumberType() {
return numberType;
}
public void setNumberType(NumberType numberType) {
this.numberType = numberType;
}
public Timestamp getBeginTime() {
return beginTime;
}
public void setBeginTime(Timestamp beginTime) {
this.beginTime = beginTime;
}
public Timestamp getEndTime() {
return endTime;
}
public void setEndTime(Timestamp endTime) {
this.endTime = endTime;
}
public int getTimeNum() {
return timeNum;
}
public void setTimeNum(int timeNum) {
this.timeNum = timeNum;
}
public TimeRangeType getTimeRangeType() {
return timeRangeType;
}
public void setTimeRangeType(TimeRangeType timeRangeType) {
this.timeRangeType = timeRangeType;
}
}
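IntegralRequest pairs a numeric predicate (numberType with begin/end/equal values) and a time window (timeRangeType with fixed timestamps or a most-recent-days count). The commit does not include the evaluator, so the pairing of fields to operators below is an assumption; a minimal sketch:

import com.gic.spark.entity.request.IntegralRequest;
import java.sql.Timestamp;

// Hypothetical evaluator; which bound each operator reads is assumed, not confirmed by this commit.
public class IntegralRequestEvaluator {
    public static boolean matchesNumber(IntegralRequest r, double actual) {
        switch (r.getNumberType()) {
            case gt:      return actual > r.getBeginNum();
            case gte:     return actual >= r.getBeginNum();
            case lt:      return actual < r.getEndNum();
            case lte:     return actual <= r.getEndNum();
            case eq:      return actual == r.getEqualNum();
            case between: return actual >= r.getBeginNum() && actual <= r.getEndNum();
            default:      return false;
        }
    }

    public static boolean matchesTime(IntegralRequest r, Timestamp when) {
        switch (r.getTimeRangeType()) {
            case FIXATION: // fixed window [beginTime, endTime]
                return !when.before(r.getBeginTime()) && !when.after(r.getEndTime());
            case LATELY:   // within the most recent timeNum days
                long cutoff = System.currentTimeMillis() - r.getTimeNum() * 24L * 60L * 60L * 1000L;
                return when.getTime() >= cutoff;
            case LATER:    // strictly after beginTime
                return when.after(r.getBeginTime());
            default:
                return false;
        }
    }
}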
@@ -81,4 +81,14 @@ public class TagConstant {
public static final String TAG_CODE_ACCUMULATED_INTEGRAL="accumulatedIntegral_f";// total points earned
public static final String TAG_CODE_ABOUT_EXPIRE_INTEGRAL="aboutExpireIntegral";// points about to expire
public static final String TAG_DOMAIN_ACU_INFO="acuInfo";
public static final String TAG_DOMAIN_SCU_INFO="scuInfo";
public static final String TAG_DOMAIN_MCU_INFO="mcuInfo";
public static final String TAG_KEY_TYPE_TIME_RANGE="timeRange";
public static final String TAG_KEY_TYPE_NUMBER="number";
}
package com.gic.spark.tag;
import com.gic.spark.entity.TagConditionDTO;
import com.gic.spark.entity.TagConditionValDTO;
import com.gic.spark.entity.enumtype.DomainType;
import com.gic.spark.entity.enumtype.TimeRangeType;
import com.gic.spark.entity.request.FilterRequest;
import com.gic.spark.entity.request.IntegralRequest;
import java.util.List;
/**
* @description:
@@ -12,7 +19,55 @@ public class TagValueParser {
    public static FilterRequest parseFilterValue(TagConditionDTO conditionDTO) {
        FilterRequest request = null;
        switch (conditionDTO.getTagEsFieldName()) {
            case TagConstant.TAG_CODE_ACCUMULATED_INTEGRAL:
                request = getIntegralRequest(conditionDTO.getTagTemplateElInfo());
                break;
            case TagConstant.TAG_CODE_ABOUT_EXPIRE_INTEGRAL:
                // TODO: about-to-expire integral is not handled yet
                break;
            default:
                break;
        }
        return request;
    }

    private static FilterRequest getIntegralRequest(List<TagConditionValDTO> conditionValDTOList) {
        IntegralRequest request = new IntegralRequest();
        for (TagConditionValDTO conditionValDTO : conditionValDTOList) {
            // String.split takes a regex, so the dot must be escaped;
            // split(".") matches every character and returns an empty array
            String[] keys = conditionValDTO.getKey().split("\\.");
            if (keys.length == 1) {
                // single-segment keys (acuInfo/scuInfo/mcuInfo) select the domain
                setDomainHandle(request, keys[0]);
            } else {
                switch (keys[0]) {
                    case TagConstant.TAG_KEY_TYPE_TIME_RANGE:
                        // TODO: map keys[1] onto TimeRangeType and the time fields
                        break;
                    case TagConstant.TAG_KEY_TYPE_NUMBER:
                        // TODO: map keys[1] onto NumberType and the number fields
                        break;
                    default:
                        break;
                }
            }
        }
        return request;
    }

    private static void setDomainHandle(FilterRequest request, String key) {
        switch (key) {
            case TagConstant.TAG_DOMAIN_ACU_INFO:
                request.setDomainType(DomainType.ACU_INFO);
                break;
            case TagConstant.TAG_DOMAIN_SCU_INFO:
                request.setDomainType(DomainType.SCU_INFO);
                break;
            case TagConstant.TAG_DOMAIN_MCU_INFO:
                request.setDomainType(DomainType.MCU_INFO);
                break;
            default:
                break;
        }
    }
}
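For reference, the key shapes this parser expects can be illustrated with hypothetical values (the commit defines the prefixes and domain names, but no concrete dotted keys; the "lately" and "gte" segments below are assumptions based on the enum values):

public class KeyShapeDemo {
    public static void main(String[] args) {
        String domainKey = "acuInfo";          // one segment -> setDomainHandle
        String timeKey = "timeRange.lately";   // TAG_KEY_TYPE_TIME_RANGE + TimeRangeType value (assumed)
        String numberKey = "number.gte";       // TAG_KEY_TYPE_NUMBER + NumberType name (assumed)
        System.out.println(java.util.Arrays.toString(domainKey.split("\\.")));  // [acuInfo]
        System.out.println(java.util.Arrays.toString(timeKey.split("\\.")));    // [timeRange, lately]
        System.out.println(java.util.Arrays.toString(numberKey.split("\\.")));  // [number, gte]
    }
}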
package org.elasticsearch.spark;
/**
* Created by paste on 2018/9/18 16:37
* <p>
* Recomputes the Elasticsearch routing shard for a document id:
* murmur3 hash of the id's UTF-16LE bytes, reduced modulo the shard count.
*/
public class ShardAlg {
public static int shard(String id, int shardNum) {
int hash = hash(id);
return mod(hash, shardNum);
}
public static int mod(int v, int m) {
int r = v % m;
if (r < 0) {
r += m;
}
return r;
}
public static int hash(String routing) {
final byte[] bytesToHash = new byte[routing.length() * 2];
for (int i = 0; i < routing.length(); ++i) {
final char c = routing.charAt(i);
final byte b1 = (byte) c, b2 = (byte) (c >>> 8);
assert ((b1 & 0xFF) | ((b2 & 0xFF) << 8)) == c; // no information loss
bytesToHash[i * 2] = b1;
bytesToHash[i * 2 + 1] = b2;
}
return hash(bytesToHash, 0, bytesToHash.length);
}
public static int hash(byte[] bytes, int offset, int length) {
return murmurhash3_x86_32(bytes, offset, length, 0);
}
public static int murmurhash3_x86_32(byte[] data, int offset, int len, int seed) {
final int c1 = 0xcc9e2d51;
final int c2 = 0x1b873593;
int h1 = seed;
int roundedEnd = offset + (len & 0xfffffffc); // round down to 4 byte block
for (int i=offset; i<roundedEnd; i+=4) {
// little endian load order
int k1 = (data[i] & 0xff) | ((data[i+1] & 0xff) << 8) | ((data[i+2] & 0xff) << 16) | (data[i+3] << 24);
k1 *= c1;
k1 = Integer.rotateLeft(k1, 15);
k1 *= c2;
h1 ^= k1;
h1 = Integer.rotateLeft(h1, 13);
h1 = h1*5+0xe6546b64;
}
// tail
int k1 = 0;
switch(len & 0x03) {
case 3:
k1 = (data[roundedEnd + 2] & 0xff) << 16;
// fallthrough
case 2:
k1 |= (data[roundedEnd + 1] & 0xff) << 8;
// fallthrough
case 1:
k1 |= (data[roundedEnd] & 0xff);
k1 *= c1;
k1 = Integer.rotateLeft(k1, 15);
k1 *= c2;
h1 ^= k1;
}
// finalization
h1 ^= len;
// fmix(h1);
h1 ^= h1 >>> 16;
h1 *= 0x85ebca6b;
h1 ^= h1 >>> 13;
h1 *= 0xc2b2ae35;
h1 ^= h1 >>> 16;
return h1;
}
}
\ No newline at end of file
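ShardAlg mirrors Elasticsearch's classic routing formula (murmur3 over the id's UTF-16LE bytes, taken modulo the shard count), so a Spark job can compute a document's target shard before writing. A small usage sketch; the shard count and document id are illustrative:

import org.elasticsearch.spark.ShardAlg;

public class ShardAlgDemo {
    public static void main(String[] args) {
        int numberOfShards = 5;          // assumed index setting
        String docId = "member-0001";    // hypothetical document id
        int shard = ShardAlg.shard(docId, numberOfShards);
        System.out.println(docId + " routes to shard " + shard); // deterministic, in [0, numberOfShards)
    }
}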
package org.elasticsearch.spark.rdd;
/**
* Created by paste on 2018/9/18 17:55
* <p>
* Factory for EsRDDWriter instances built from serialized Elasticsearch settings.
*/
public class EsRddCreator {
    public static EsRDDWriter<String> createRddWriter(String setting) {
        // build a writer from the serialized Elasticsearch settings (runtime metadata disabled)
        return new EsRDDWriter<String>(setting, false, null);
    }
}