supeng 5 days ago
parent
commit
adb39ef4ba
54 files changed with 4250 additions and 3 deletions
  1. 3 0
      .gitignore
  2. 2 3
      README.md
  3. 194 0
      pom.xml
  4. 15 0
      supply-demand-engine-core/pom.xml
  5. 143 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/aop/LogRequestAop.java
  6. 14 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/annotation/NoRequestLog.java
  7. 18 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/base/CacheKeyConstant.java
  8. 40 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/base/CommonRequest.java
  9. 91 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/base/CommonResponse.java
  10. 30 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/base/Constant.java
  11. 35 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/enums/DataStatusEnum.java
  12. 41 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/enums/ExceptionEnum.java
  13. 10 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/CacheException.java
  14. 97 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/CommonException.java
  15. 11 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/DAOException.java
  16. 50 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/HttpServiceException.java
  17. 45 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/TimeoutException.java
  18. 66 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/config/RedisTemplateConfig.java
  19. 37 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/config/SwaggerConfig.java
  20. 79 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/config/XxlJobConfig.java
  21. 33 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/dao/generator/MybatisGeneratorMain.java
  22. 38 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/filter/AuthFilter.java
  23. 40 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/filter/LogTraceFilter.java
  24. 76 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/handle/GlobalExceptionHandle.java
  25. 170 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/model/dto/BaseInfoDTO.java
  26. 27 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/model/dto/PageDTO.java
  27. 8 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/service/LoghubService.java
  28. 75 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/service/impl/LoghubServiceImpl.java
  29. 261 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/util/DateUtil.java
  30. 41 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/util/IpUtil.java
  31. 67 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/util/ODPSManager.java
  32. 930 0
      supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/util/RedisUtil.java
  33. 53 0
      supply-demand-engine-core/src/main/resources/mybatis-generator-config.xml
  34. 48 0
      supply-demand-engine-job/pom.xml
  35. 31 0
      supply-demand-engine-job/src/main/java/com/tzld/piaoquan/supply/demand/engine/Application.java
  36. 77 0
      supply-demand-engine-job/src/main/resources/application-dev.yml
  37. 77 0
      supply-demand-engine-job/src/main/resources/application-pre.yml
  38. 75 0
      supply-demand-engine-job/src/main/resources/application-prod.yml
  39. 77 0
      supply-demand-engine-job/src/main/resources/application-stress.yml
  40. 75 0
      supply-demand-engine-job/src/main/resources/application-test.yml
  41. 27 0
      supply-demand-engine-job/src/main/resources/application.yml
  42. 193 0
      supply-demand-engine-job/src/main/resources/logback-spring.xml
  43. 13 0
      supply-demand-engine-job/src/test/java/com/tzld/piaoquan/supply/demand/engine/BaseTest.java
  44. 48 0
      supply-demand-engine-service/pom.xml
  45. 31 0
      supply-demand-engine-service/src/main/java/com/tzld/piaoquan/supply/demand/engine/Application.java
  46. 24 0
      supply-demand-engine-service/src/main/java/com/tzld/piaoquan/supply/demand/engine/controller/IndexController.java
  47. 77 0
      supply-demand-engine-service/src/main/resources/application-dev.yml
  48. 77 0
      supply-demand-engine-service/src/main/resources/application-pre.yml
  49. 75 0
      supply-demand-engine-service/src/main/resources/application-prod.yml
  50. 77 0
      supply-demand-engine-service/src/main/resources/application-stress.yml
  51. 75 0
      supply-demand-engine-service/src/main/resources/application-test.yml
  52. 27 0
      supply-demand-engine-service/src/main/resources/application.yml
  53. 193 0
      supply-demand-engine-service/src/main/resources/logback-spring.xml
  54. 13 0
      supply-demand-engine-service/src/test/java/com/tzld/piaoquan/supply/demand/engine/BaseTest.java

+ 3 - 0
.gitignore

@@ -11,4 +11,7 @@
 
 # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
 hs_err_pid*
+.idea/
+
+*target/
 

+ 2 - 3
README.md

@@ -1,3 +1,2 @@
-# supply-demand-engine
-
-供需分析与决策引擎
+# Supply-Demand-Engine
+供需分析与决策引擎

+ 194 - 0
pom.xml

@@ -0,0 +1,194 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+    <modelVersion>4.0.0</modelVersion>
+    <packaging>pom</packaging>
+    <parent>
+        <groupId>com.tzld.commons</groupId>
+        <artifactId>supom</artifactId>
+        <version>1.0.5</version>
+    </parent>
+    <groupId>com.tzld.piaoquan</groupId>
+    <artifactId>supply-demand-engine</artifactId>
+    <version>1.0.0</version>
+    <name>supply-demand-engine</name>
+    <description>supply-demand-engine</description>
+    
+    <modules>
+        <module>supply-demand-engine-core</module>
+        <module>supply-demand-engine-service</module>
+        <module>supply-demand-engine-job</module>
+  	</modules>
+
+    <properties>
+        <lombok.version>1.18.20</lombok.version>
+        <xxl.job.version>2.2.0</xxl.job.version>
+        <aviator.version>5.2.7</aviator.version>
+        <httpclient.version>4.5.13</httpclient.version>
+        <common.lang3.version>3.12.0</common.lang3.version>
+        <spring.kafka.version>2.6.4</spring.kafka.version>
+    </properties>
+
+    <dependencyManagement>
+        <dependencies>
+            <!-- https://mvnrepository.com/artifact/com.googlecode.aviator/aviator -->
+            <dependency>
+                <groupId>com.googlecode.aviator</groupId>
+                <artifactId>aviator</artifactId>
+                <version>${aviator.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.projectlombok</groupId>
+                <artifactId>lombok</artifactId>
+                <version>${lombok.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>com.xuxueli</groupId>
+                <artifactId>xxl-job-core</artifactId>
+                <version>${xxl.job.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.httpcomponents</groupId>
+                <artifactId>httpclient</artifactId>
+                <version>${httpclient.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.commons</groupId>
+                <artifactId>commons-lang3</artifactId>
+                <version>${common.lang3.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.springframework.kafka</groupId>
+                <artifactId>spring-kafka</artifactId>
+                <version>${spring.kafka.version}</version>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-pool2</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.junit.vintage</groupId>
+                    <artifactId>junit-vintage-engine</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>javax.servlet</groupId>
+            <artifactId>javax.servlet-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.mybatis.spring.boot</groupId>
+            <artifactId>mybatis-spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.mybatis.generator</groupId>
+            <artifactId>mybatis-generator-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.github.pagehelper</groupId>
+            <artifactId>pagehelper-spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-redis</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.springfox</groupId>
+            <artifactId>springfox-swagger2</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.springfox</groupId>
+            <artifactId>springfox-swagger-ui</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.googlecode.aviator</groupId>
+            <artifactId>aviator</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.xuxueli</groupId>
+            <artifactId>xxl-job-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.cloud</groupId>
+            <artifactId>spring-cloud-commons</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.kafka</groupId>
+            <artifactId>spring-kafka</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.cloud</groupId>
+            <artifactId>spring-cloud-starter-netflix-eureka-client</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.cloud</groupId>
+            <artifactId>spring-cloud-starter-openfeign</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.ctrip.framework.apollo</groupId>
+            <artifactId>apollo-client</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.aliyun.odps</groupId>
+            <artifactId>odps-sdk-core</artifactId>
+            <version>0.27.2-public</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.tzld.commons</groupId>
+            <artifactId>aliyun-log-spring-boot-starter</artifactId>
+            <version>2.0.0</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>aliyun-log</artifactId>
+                    <groupId>com.aliyun.openservices</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.protobuf</groupId>
+            <artifactId>protobuf-java</artifactId>
+            <version>2.5.0</version>
+        </dependency>
+        <dependency>
+            <groupId>com.aliyun.openservices</groupId>
+            <artifactId>aliyun-log-logback-appender</artifactId>
+            <version>0.1.18</version>
+        </dependency>
+
+    </dependencies>
+</project>

+ 15 - 0
supply-demand-engine-core/pom.xml

@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.tzld.piaoquan</groupId>
+        <artifactId>supply-demand-engine</artifactId>
+        <version>1.0.0</version>
+    </parent>
+    <artifactId>supply-demand-engine-core</artifactId>
+    <name>supply-demand-engine-core</name>
+    <description>supply-demand-engine-core</description>
+
+</project>

+ 143 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/aop/LogRequestAop.java

@@ -0,0 +1,143 @@
+package com.tzld.piaoquan.supply.demand.engine.aop;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.JSONObject;
+import com.google.common.base.Strings;
+import com.tzld.commons.aliyun.log.AliyunLogManager;
+import com.tzld.piaoquan.supply.demand.engine.common.base.CommonResponse;
+import com.tzld.piaoquan.supply.demand.engine.common.base.Constant;
+import com.tzld.piaoquan.supply.demand.engine.util.IpUtil;
+import lombok.extern.slf4j.Slf4j;
+import org.aspectj.lang.JoinPoint;
+import org.aspectj.lang.annotation.*;
+import org.slf4j.MDC;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+import org.springframework.web.context.request.RequestContextHolder;
+import org.springframework.web.context.request.ServletRequestAttributes;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.*;
+import java.util.stream.Collectors;
+
+
+/**
+ * Request logging aspect
+ *
+ * @author supeng
+ */
+@Slf4j
+@Aspect
+@Component
+public class LogRequestAop {
+
+    private final AliyunLogManager aliyunLogManager;
+    /**
+     * Aliyun Log configuration
+     */
+    @Value("${aliyun.log.project:}")
+    private String project;
+    @Value("${aliyun.log.logstore.request:}")
+    private String logStore;
+    @Value("${aliyun.log.topic:}")
+    private String topic;
+
+    @Autowired
+    public LogRequestAop(AliyunLogManager aliyunLogManager) {
+        this.aliyunLogManager = aliyunLogManager;
+    }
+
+    /**
+     * Pointcut: all public controller methods, except those annotated with @NoRequestLog
+     */
+    @Pointcut("execution(public * com.tzld.piaoquan.supply.demand.engine..*Controller.*(..)) && !@annotation(com.tzld.piaoquan.supply.demand.engine.common.annotation.NoRequestLog)")
+    public void requestLog() {
+    }
+
+    /**
+     * Before advice: record the request start time
+     */
+    @Before("requestLog()")
+    public void beforeLog() {
+        ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
+        HttpServletRequest request = Objects.requireNonNull(attributes).getRequest();
+
+        request.setAttribute(Constant.REQUEST_START_TIME, System.currentTimeMillis());
+    }
+
+    /**
+     * After-returning advice: report the request and response
+     */
+    @AfterReturning(pointcut = "requestLog()", returning = "returnValue")
+    public void afterReturning(JoinPoint point, Object returnValue) {
+        logRecord(point, JSON.toJSONString(returnValue));
+    }
+
+    @AfterThrowing(pointcut = "requestLog()", throwing = "ex")
+    public void afterThrowing(JoinPoint point, Exception ex) {
+        logRecord(point, ex.toString());
+    }
+
+    private void logRecord(JoinPoint point, String message) {
+        try {
+            ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
+            HttpServletRequest request = Objects.requireNonNull(attributes).getRequest();
+
+            Long startTime = (Long) request.getAttribute(Constant.REQUEST_START_TIME);
+            if (Objects.isNull(startTime)) {
+                return;
+            }
+
+            String query = request.getQueryString();
+            String uri = request.getRequestURI();
+
+            Map<String, Object> logMap = new HashMap<>(16);
+            logMap.put("url", Strings.isNullOrEmpty(query) ? uri : uri + "?" + query);
+            logMap.put("method", request.getMethod());
+            logMap.put("header", getHeaders(request));
+            logMap.put("elapsedTime", String.valueOf(System.currentTimeMillis() - startTime));
+            logMap.put("clientIp", IpUtil.getIpAddr(request));
+            logMap.put("requestBody", getRequestBody(point.getArgs()));
+            logMap.put(Constant.LOG_TRACE_ID, Strings.nullToEmpty(MDC.get(Constant.LOG_TRACE_ID)));
+            logMap.put("responseBody", message);
+            try {
+                if (Objects.nonNull(message)) {
+                    CommonResponse commonResponse = JSONObject.parseObject(message, CommonResponse.class);
+                    if (Objects.nonNull(commonResponse)) {
+                        logMap.put("responseCode", commonResponse.getCode());
+                        if (commonResponse.getData() instanceof List) {
+                            if (Objects.isNull(commonResponse.getData())) {
+                                logMap.put("responseDataCount", 0);
+                            } else {
+                                logMap.put("responseDataCount", ((List<?>) commonResponse.getData()).size());
+                            }
+                        }
+                    }
+                }
+            } catch (Exception e) {
+                log.error("log report response message error", e);
+            }
+
+            aliyunLogManager.sendLog(project, logStore, topic, logMap);
+        } catch (Exception e) {
+            log.error("log report request error", e);
+        }
+    }
+
+    private Object getRequestBody(Object[] args) {
+        try {
+            return JSON.toJSONString(args[0]);
+        } catch (Exception e) {
+            return args;
+        }
+    }
+
+    private String getHeaders(HttpServletRequest request) {
+        return Collections.list(request.getHeaderNames())
+                .stream()
+                .collect(Collectors.toMap(
+                        name -> name,
+                        request::getHeader)).toString();
+    }
+}

+ 14 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/annotation/NoRequestLog.java

@@ -0,0 +1,14 @@
+package com.tzld.piaoquan.supply.demand.engine.common.annotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @author supeng
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.METHOD)
+public @interface NoRequestLog {
+}
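
The requestLog() pointcut in LogRequestAop matches every public *Controller method under com.tzld.piaoquan.supply.demand.engine and skips methods carrying @NoRequestLog. A minimal sketch of both cases — this controller and its endpoints are illustrative only, not part of this commit:

    package com.tzld.piaoquan.supply.demand.engine.controller;

    import com.tzld.piaoquan.supply.demand.engine.common.annotation.NoRequestLog;
    import com.tzld.piaoquan.supply.demand.engine.common.base.CommonResponse;
    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.RestController;

    @RestController
    public class DemoController {

        // Matched by requestLog(): url, headers, elapsed time and response are sent to Aliyun Log
        @GetMapping("/api/echo")
        public CommonResponse<String> echo() {
            return CommonResponse.success("ok");
        }

        // Excluded from request logging via @NoRequestLog
        @NoRequestLog
        @GetMapping("/healthcheck")
        public CommonResponse<Void> health() {
            return CommonResponse.success();
        }
    }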

+ 18 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/base/CacheKeyConstant.java

@@ -0,0 +1,18 @@
+package com.tzld.piaoquan.supply.demand.engine.common.base;
+
+/**
+ * Cache key constants
+ *
+ * @author supeng
+ */
+public class CacheKeyConstant {
+    public static final long ONE_HOUR_SECOND = 3600;
+    public static final long SIX_HOUR_SECOND = 6 * ONE_HOUR_SECOND;
+    public static final long ONE_HOUR_MILLS = ONE_HOUR_SECOND * 1000;
+    public static final long ONE_DAY_SECOND = ONE_HOUR_SECOND * 24;
+    public static final long ONE_DAY_MILLS = ONE_DAY_SECOND * 1000;
+    public static final long ONE_WEEK_SECOND = ONE_DAY_SECOND * 7;
+    public static final long ONE_WEEK_MILLS = ONE_WEEK_SECOND * 1000;
+    public static final long ONE_MONTH_SECOND = ONE_DAY_SECOND * 30;
+    public static final long ONE_MONTH_MILLS = ONE_MONTH_SECOND * 1000;
+}

+ 40 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/base/CommonRequest.java

@@ -0,0 +1,40 @@
+package com.tzld.piaoquan.supply.demand.engine.common.base;
+
+import com.tzld.piaoquan.supply.demand.engine.model.dto.BaseInfoDTO;
+
+/**
+ * Common request wrapper
+ *
+ * @author supeng
+ */
+public class CommonRequest<T> {
+    /**
+     * Common base info
+     */
+    private BaseInfoDTO baseInfo;
+    /**
+     * Request parameters
+     */
+    private T params;
+
+    public BaseInfoDTO getBaseInfo() {
+        return baseInfo;
+    }
+
+    public void setBaseInfo(BaseInfoDTO baseInfo) {
+        this.baseInfo = baseInfo;
+    }
+
+    public T getParams() {
+        return params;
+    }
+
+    public void setParams(T params) {
+        this.params = params;
+    }
+
+    @Override
+    public String toString() {
+        return "CommonRequest{" + "baseInfo=" + baseInfo + ", params=" + params + '}';
+    }
+}
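
CommonRequest pairs the shared BaseInfoDTO with a typed params payload. A rough usage sketch with fastjson (declared in the root pom) and the PageDTO added later in this commit; all values here are made up:

    import com.alibaba.fastjson.JSON;
    import com.tzld.piaoquan.supply.demand.engine.common.base.CommonRequest;
    import com.tzld.piaoquan.supply.demand.engine.model.dto.BaseInfoDTO;
    import com.tzld.piaoquan.supply.demand.engine.model.dto.PageDTO;

    public class CommonRequestDemo {
        public static void main(String[] args) {
            BaseInfoDTO baseInfo = new BaseInfoDTO();
            baseInfo.setAppType(1);
            baseInfo.setPlatform("android");

            PageDTO page = new PageDTO();
            page.setPageNum(1);
            page.setPageSize(20);

            CommonRequest<PageDTO> request = new CommonRequest<>();
            request.setBaseInfo(baseInfo);
            request.setParams(page);

            // e.g. {"baseInfo":{"appType":1,"platform":"android"},"params":{"pageNum":1,"pageSize":20}}
            System.out.println(JSON.toJSONString(request));
        }
    }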

+ 91 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/base/CommonResponse.java

@@ -0,0 +1,91 @@
+package com.tzld.piaoquan.supply.demand.engine.common.base;
+
+/**
+ * Common Response
+ */
+public class CommonResponse<T> {
+
+    private static final int SUCCESS_CODE = 0;
+    private static final String SUCCESS_MSG = "success";
+
+    /** Response code; 0 means business success */
+    private int code = 0;
+    /** Response message */
+    private String msg = SUCCESS_MSG;
+    /** Data returned on business success */
+    private T data;
+    /** Redirect target */
+    private String redirect;
+
+    public boolean isSuccess() {
+        return this.code == SUCCESS_CODE;
+    }
+
+    public static <T> CommonResponse<T> success() {
+        CommonResponse<T> commonResponse = new CommonResponse<>();
+        commonResponse.setCode(SUCCESS_CODE);
+        commonResponse.setMsg(SUCCESS_MSG);
+        return commonResponse;
+    }
+
+    public static <T> CommonResponse<T> success(T data) {
+        CommonResponse<T> commonResponse = new CommonResponse<>();
+        commonResponse.setCode(SUCCESS_CODE);
+        commonResponse.setMsg(SUCCESS_MSG);
+        commonResponse.setData(data);
+        return commonResponse;
+    }
+
+    public static <T> CommonResponse<T> create() {
+        return create(SUCCESS_CODE, SUCCESS_MSG, null);
+    }
+    
+    public static <T> CommonResponse<T> create(T data) {
+        return create(SUCCESS_CODE, SUCCESS_MSG, data);
+    }
+
+    public static <T> CommonResponse<T> create(int code, String msg) {
+        return create(code, msg, null);
+    }
+
+    public static <T> CommonResponse<T> create(int code, String msg, T data) {
+        CommonResponse<T> commonResponse = new CommonResponse<>();
+        commonResponse.setCode(code);
+        commonResponse.setMsg(msg);
+        commonResponse.setData(data);
+        return commonResponse;
+    }
+
+    public int getCode() {
+        return code;
+    }
+
+    public void setCode(int code) {
+        this.code = code;
+    }
+
+    public String getMsg() {
+        return msg;
+    }
+
+    public void setMsg(String msg) {
+        this.msg = msg;
+    }
+
+    public T getData() {
+        return data;
+    }
+
+    public void setData(T data) {
+        this.data = data;
+    }
+
+    public String getRedirect() {
+        return redirect;
+    }
+
+    public void setRedirect(String redirect) {
+        this.redirect = redirect;
+    }
+
+}
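
The static factories cover the usual cases: success()/success(data) for the happy path and create(code, msg) for failures. A short sketch of typical call sites, using the ExceptionEnum added later in this commit; the payload is arbitrary:

    import com.tzld.piaoquan.supply.demand.engine.common.base.CommonResponse;
    import com.tzld.piaoquan.supply.demand.engine.common.enums.ExceptionEnum;

    import java.util.Arrays;
    import java.util.List;

    public class CommonResponseDemo {
        public static void main(String[] args) {
            // Business success: code=0, msg="success"
            CommonResponse<List<String>> ok = CommonResponse.success(Arrays.asList("a", "b"));
            System.out.println(ok.isSuccess() + " " + ok.getData());

            // Failure built from the shared error enum
            CommonResponse<Void> err = CommonResponse.create(
                    ExceptionEnum.PARAMS_INVALID.getCode(), ExceptionEnum.PARAMS_INVALID.getMsg());
            System.out.println(err.isSuccess() + " " + err.getMsg());
        }
    }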

+ 30 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/base/Constant.java

@@ -0,0 +1,30 @@
+package com.tzld.piaoquan.supply.demand.engine.common.base;
+
+/**
+ * Constants
+ *
+ * @author supeng
+ */
+public class Constant {
+    /**
+     * traceID
+     */
+    public static final String LOG_TRACE_ID = "logTraceId";
+
+    /**
+     * Underscore separator
+     */
+    public static final String LINE = "_";
+    /**
+     * Milliseconds in one day
+     */
+    public static final long ONE_DAY_MILLS = 24 * 60 * 60 * 1000;
+    /**
+     * Milliseconds in one hour
+     */
+    public static final long ONE_HOUR_MILLS = 60 * 60 * 1000;
+    /**
+     * Request attribute key for the request start timestamp
+     */
+    public static final String REQUEST_START_TIME = "request_start_time";
+}

+ 35 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/enums/DataStatusEnum.java

@@ -0,0 +1,35 @@
+package com.tzld.piaoquan.supply.demand.engine.common.enums;
+
+/**
+ * Data status
+ *
+ * @author supeng
+ * @date 2020/7/31
+ */
+public enum DataStatusEnum {
+    INVALID(0, "无效"),
+    VALID(1, "有效");
+    private Integer value;
+    private String desc;
+
+    DataStatusEnum(int value, String desc) {
+        this.value = value;
+        this.desc = desc;
+    }
+
+    public Integer getValue() {
+        return value;
+    }
+
+    public void setValue(Integer value) {
+        this.value = value;
+    }
+
+    public String getDesc() {
+        return desc;
+    }
+
+    public void setDesc(String desc) {
+        this.desc = desc;
+    }
+}

+ 41 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/enums/ExceptionEnum.java

@@ -0,0 +1,41 @@
+package com.tzld.piaoquan.supply.demand.engine.common.enums;
+
+/**
+ * Business exception codes
+ *
+ * @author supeng
+ * @date 2020/08/31
+ */
+public enum ExceptionEnum {
+
+    SUCCESS(0, "成功"),
+    SYSTEM_ERROR(1, "系统错误"),
+    PARAMS_INVALID(2, "参数错误"),
+    DATA_NOT_EXIST(3, "数据不存在"),
+    DATA_ERROR(4, "数据错误"),
+    EXIST_RELATED_DATA(5, "存在关联数据"),
+    DATA_EXIST(6, "数据已存在"),
+    ILLEGAL_OPERATION (7, "非法操作"),
+    CONFIG_ERROR (8, "配置异常"),
+    FLOW_POOL_AUTO_ENTER_ERROR (9, "只能同时一个流量池打开【自动入池】开关"),
+    FLOW_POOL_1_AUTO_ENTER_HOLD (10, "流量池【id=1】,作为尾号流量池的开关切换策略,需要保持【自动入池】状态"),
+    FLOW_POOL_AUTO_ENTER_EXCEPT_SUPPLY (11, "除了供给池,只能同时一个流量池打开【自动入池】开关"),
+    ;
+
+    private int code;
+    private String msg;
+
+    public int getCode() {
+        return code;
+    }
+
+
+    public String getMsg() {
+        return msg;
+    }
+
+    ExceptionEnum(int code, String msg) {
+        this.code = code;
+        this.msg = msg;
+    }
+}

+ 10 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/CacheException.java

@@ -0,0 +1,10 @@
+package com.tzld.piaoquan.supply.demand.engine.common.exception;
+
+/**
+ * Cache exception
+ *
+ * @author supeng
+ */
+public class CacheException extends RuntimeException {
+
+}

+ 97 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/CommonException.java

@@ -0,0 +1,97 @@
+package com.tzld.piaoquan.supply.demand.engine.common.exception;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.tzld.piaoquan.supply.demand.engine.common.enums.ExceptionEnum;
+
+/**
+ * Common business exception
+ *
+ * @author supeng
+ */
+public class CommonException extends RuntimeException {
+
+    /**
+     *
+     */
+    private static final long serialVersionUID = 1L;
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(CommonException.class);
+    /**
+     * Exception enum
+     */
+    private ExceptionEnum exceptionEnum;
+    /**
+     * Error code
+     */
+    private int code;
+    /**
+     * Error message
+     */
+    private String msg;
+
+
+    public int getCode() {
+        return code;
+    }
+
+    public void setCode(int code) {
+        this.code = code;
+    }
+
+    public String getMsg() {
+        return msg;
+    }
+
+    public void setMsg(String msg) {
+        this.msg = msg;
+    }
+
+    public CommonException(Throwable throwable) {
+        super(throwable);
+    }
+
+    public CommonException(int code, String msg) {
+        super(msg);
+        this.code = code;
+        this.msg = msg;
+    }
+
+    public CommonException(ExceptionEnum exceptionEnum) {
+        super(exceptionEnum.getMsg());
+        this.exceptionEnum = exceptionEnum;
+        this.code = exceptionEnum.getCode();
+        this.msg = exceptionEnum.getMsg();
+    }
+
+    public CommonException(ExceptionEnum exceptionEnum, String msg) {
+        super(msg);
+        this.exceptionEnum = exceptionEnum;
+        this.code = exceptionEnum.getCode();
+        this.msg = msg;
+    }
+
+
+    public CommonException(int code, String msg, Throwable throwable) {
+        super(msg, throwable);
+        this.code = code;
+        this.msg = msg;
+    }
+
+    public CommonException(ExceptionEnum exceptionEnum, Throwable throwable) {
+        super(exceptionEnum.getMsg(), throwable);
+        this.exceptionEnum = exceptionEnum;
+        this.code = exceptionEnum.getCode();
+        this.msg = exceptionEnum.getMsg();
+    }
+
+
+    @Override
+    public void printStackTrace() {
+        if (exceptionEnum != null) {
+            LOGGER.info("exception code = {}, msg = {}", exceptionEnum.getCode(), exceptionEnum.getMsg());
+        }
+        LOGGER.info("exception code = {}, msg = {}", code, msg);
+    }
+}

+ 11 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/DAOException.java

@@ -0,0 +1,11 @@
+package com.tzld.piaoquan.supply.demand.engine.common.exception;
+
+/**
+ * DAO exception
+ *
+ * @author supeng
+ * @date 2020/08/19
+ */
+public class DAOException extends RuntimeException {
+
+}

+ 50 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/HttpServiceException.java

@@ -0,0 +1,50 @@
+package com.tzld.piaoquan.supply.demand.engine.common.exception;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * HTTP service exception
+ * @author supeng
+ */
+@Setter
+public class HttpServiceException extends RuntimeException {
+
+    @Getter
+    private int code;
+    private String message;
+
+    @Override
+    public String getMessage() {
+        return message;
+    }
+
+    public HttpServiceException(int code, String message) {
+        this.code = code;
+        this.message = message;
+    }
+
+    public HttpServiceException(String message, int code, String message1) {
+        super(message);
+        this.code = code;
+        this.message = message1;
+    }
+
+    public HttpServiceException(String message, Throwable cause, int code, String message1) {
+        super(message, cause);
+        this.code = code;
+        this.message = message1;
+    }
+
+    public HttpServiceException(Throwable cause, int code, String message) {
+        super(cause);
+        this.code = code;
+        this.message = message;
+    }
+
+    public HttpServiceException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace, int code, String message1) {
+        super(message, cause, enableSuppression, writableStackTrace);
+        this.code = code;
+        this.message = message1;
+    }
+}

+ 45 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/common/exception/TimeoutException.java

@@ -0,0 +1,45 @@
+package com.tzld.piaoquan.supply.demand.engine.common.exception;
+
+
+import lombok.Setter;
+
+/**
+ * @author supeng
+ */
+@Setter
+public class TimeoutException extends RuntimeException {
+
+    private String message;
+
+    @Override
+    public String getMessage() {
+        return message;
+    }
+
+    public TimeoutException() {
+    }
+
+    public TimeoutException(String message) {
+        this.message = message;
+    }
+
+    public TimeoutException(String message, String message1) {
+        super(message);
+        this.message = message1;
+    }
+
+    public TimeoutException(String message, Throwable cause, String message1) {
+        super(message, cause);
+        this.message = message1;
+    }
+
+    public TimeoutException(Throwable cause, String message) {
+        super(cause);
+        this.message = message;
+    }
+
+    public TimeoutException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace, String message1) {
+        super(message, cause, enableSuppression, writableStackTrace);
+        this.message = message1;
+    }
+}

+ 66 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/config/RedisTemplateConfig.java

@@ -0,0 +1,66 @@
+package com.tzld.piaoquan.supply.demand.engine.config;
+
+import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Primary;
+import org.springframework.data.redis.connection.RedisConnectionFactory;
+import org.springframework.data.redis.connection.RedisStandaloneConfiguration;
+import org.springframework.data.redis.connection.lettuce.LettuceClientConfiguration;
+import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory;
+import org.springframework.data.redis.connection.lettuce.LettucePoolingClientConfiguration;
+import org.springframework.data.redis.core.RedisTemplate;
+import org.springframework.data.redis.serializer.StringRedisSerializer;
+
+/**
+ * redis config
+ *
+ * @author supeng
+ */
+@Configuration
+public class RedisTemplateConfig {
+
+    @Bean
+    @ConfigurationProperties(prefix = "spring.redis.lettuce.pool")
+    public GenericObjectPoolConfig<LettucePoolingClientConfiguration> redisPool() {
+        return new GenericObjectPoolConfig<>();
+    }
+
+    @Bean
+    @ConfigurationProperties(prefix = "spring.redis")
+    public RedisStandaloneConfiguration redisConfig() {
+        return new RedisStandaloneConfiguration();
+    }
+
+    @Bean("factory")
+    @Primary
+    public LettuceConnectionFactory factory(GenericObjectPoolConfig<LettucePoolingClientConfiguration> config, RedisStandaloneConfiguration redisConfig) {
+        LettuceClientConfiguration lettuceClientConfiguration = LettucePoolingClientConfiguration.builder().poolConfig(config).build();
+        return new LettuceConnectionFactory(redisConfig, lettuceClientConfiguration);
+    }
+
+    @Bean(name = "redisTemplate")
+    public RedisTemplate<String, String> getRedisTemplate(@Qualifier("factory") RedisConnectionFactory factory) {
+        return buildRedisTemplate(factory);
+    }
+
+    /**
+     * Build a RedisTemplate that uses String serialization for keys and values
+     *
+     * @param factory redis connection factory
+     * @return string-serialized RedisTemplate
+     */
+    public RedisTemplate<String, String> buildRedisTemplate(RedisConnectionFactory factory) {
+        RedisTemplate<String, String> redisTemplate = new RedisTemplate<>();
+        redisTemplate.setConnectionFactory(factory);
+        // Serialize keys and values as plain strings to keep them human-readable
+        redisTemplate.setKeySerializer(new StringRedisSerializer());
+        redisTemplate.setValueSerializer(new StringRedisSerializer());
+        redisTemplate.setHashKeySerializer(new StringRedisSerializer());
+        redisTemplate.setHashValueSerializer(new StringRedisSerializer());
+        return redisTemplate;
+    }
+
+}
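
Every serializer slot uses StringRedisSerializer, so values round-trip as plain text. A minimal caller sketch assuming constructor injection of the "redisTemplate" bean; the key and TTL here are placeholders:

    import com.tzld.piaoquan.supply.demand.engine.common.base.CacheKeyConstant;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.data.redis.core.RedisTemplate;
    import org.springframework.stereotype.Component;

    import java.util.concurrent.TimeUnit;

    @Component
    public class DemoCacheAccessor {

        private final RedisTemplate<String, String> redisTemplate;

        public DemoCacheAccessor(@Qualifier("redisTemplate") RedisTemplate<String, String> redisTemplate) {
            this.redisTemplate = redisTemplate;
        }

        public String cacheDemo() {
            // plain string key/value with a one-hour expiry
            redisTemplate.opsForValue().set("supply:demand:demo", "1", CacheKeyConstant.ONE_HOUR_SECOND, TimeUnit.SECONDS);
            return redisTemplate.opsForValue().get("supply:demand:demo");
        }
    }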

+ 37 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/config/SwaggerConfig.java

@@ -0,0 +1,37 @@
+package com.tzld.piaoquan.supply.demand.engine.config;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
+import springfox.documentation.builders.ApiInfoBuilder;
+import springfox.documentation.builders.PathSelectors;
+import springfox.documentation.builders.RequestHandlerSelectors;
+import springfox.documentation.service.ApiInfo;
+import springfox.documentation.spi.DocumentationType;
+import springfox.documentation.spring.web.plugins.Docket;
+
+/**
+ * Swagger2 configuration
+ * @author supeng
+ */
+@Configuration
+@Profile({"dev","test"})
+public class SwaggerConfig {
+    @Bean
+    public Docket createRestApi() {
+        return new Docket(DocumentationType.SWAGGER_2)
+                .apiInfo(apiInfo())
+                .select()
+                .apis(RequestHandlerSelectors.basePackage("com.tzld.piaoquan.supply.demand.engine.controller"))
+                .paths(PathSelectors.any())
+                .build();
+    }
+
+    private ApiInfo apiInfo() {
+        return new ApiInfoBuilder()
+                .title("supply-demand-engine swagger api")
+                .description("supply-demand-engine swagger api")
+                .version("1.0")
+                .build();
+    }
+}

+ 79 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/config/XxlJobConfig.java

@@ -0,0 +1,79 @@
+package com.tzld.piaoquan.supply.demand.engine.config;
+
+import com.xxl.job.core.executor.impl.XxlJobSpringExecutor;
+import lombok.extern.slf4j.Slf4j;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.cloud.commons.util.InetUtils;
+import org.springframework.cloud.commons.util.InetUtilsProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * xxl job config
+ *
+ * @author supeng
+ */
+@Slf4j
+@Configuration
+public class XxlJobConfig {
+
+    @Value("${xxl.job.admin.addresses}")
+    private String adminAddresses;
+
+    @Value("${xxl.job.accessToken}")
+    private String accessToken;
+
+    @Value("${xxl.job.executor.appname}")
+    private String appname;
+
+    @Value("${xxl.job.executor.address}")
+    private String address;
+
+    @Value("${xxl.job.executor.ip}")
+    private String ip;
+
+    @Value("${xxl.job.executor.port}")
+    private int port;
+
+    @Value("${xxl.job.executor.logpath}")
+    private String logPath;
+
+    @Value("${xxl.job.executor.logretentiondays}")
+    private int logRetentionDays;
+
+
+    @Bean
+    public XxlJobSpringExecutor xxlJobExecutor() {
+        log.info("xxl-job config init");
+        XxlJobSpringExecutor xxlJobSpringExecutor = new XxlJobSpringExecutor();
+        xxlJobSpringExecutor.setAdminAddresses(adminAddresses);
+        xxlJobSpringExecutor.setAppname(appname);
+        xxlJobSpringExecutor.setAddress(address);
+        InetUtils inetUtils = new InetUtils(new InetUtilsProperties());
+        /**
+         * For hosts with multiple NICs or container deployments, the registered IP can be customized via the "InetUtils" component provided by "spring-cloud-commons":
+         *
+         *      1. Add the dependency:
+         *          <dependency>
+         *             <groupId>org.springframework.cloud</groupId>
+         *             <artifactId>spring-cloud-commons</artifactId>
+         *             <version>${version}</version>
+         *         </dependency>
+         *
+         *      2. In the config file, or as a container startup variable:
+         *          spring.cloud.inetutils.preferred-networks: 'xxx.xxx.xxx.'
+         *
+         *      3. Obtain the IP:
+         *          String ip_ = inetUtils.findFirstNonLoopbackHostInfo().getIpAddress();
+         */
+        String ipAddress = inetUtils.findFirstNonLoopbackHostInfo().getIpAddress();
+        xxlJobSpringExecutor.setIp(ipAddress);
+        xxlJobSpringExecutor.setPort(port);
+        xxlJobSpringExecutor.setAccessToken(accessToken);
+        xxlJobSpringExecutor.setLogPath(logPath);
+        xxlJobSpringExecutor.setLogRetentionDays(logRetentionDays);
+        return xxlJobSpringExecutor;
+    }
+}
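
The executor bean only registers this node with the xxl-job admin; job handlers still have to be declared separately. A hedged sketch of what a method-based handler could look like with xxl-job-core 2.2.0 — the handler name and body are placeholders, no such job exists in this commit:

    import com.xxl.job.core.biz.model.ReturnT;
    import com.xxl.job.core.handler.annotation.XxlJob;
    import org.springframework.stereotype.Component;

    @Component
    public class DemoJobHandler {

        // "demoJobHandler" is the JobHandler name configured on the xxl-job admin side
        @XxlJob("demoJobHandler")
        public ReturnT<String> execute(String param) {
            // scheduled work would go here
            return ReturnT.SUCCESS;
        }
    }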

+ 33 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/dao/generator/MybatisGeneratorMain.java

@@ -0,0 +1,33 @@
+package com.tzld.piaoquan.supply.demand.engine.dao.generator;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.mybatis.generator.api.MyBatisGenerator;
+import org.mybatis.generator.config.Configuration;
+import org.mybatis.generator.config.xml.ConfigurationParser;
+import org.mybatis.generator.exception.InvalidConfigurationException;
+import org.mybatis.generator.exception.XMLParserException;
+import org.mybatis.generator.internal.DefaultShellCallback;
+
+/**
+ * @author supeng
+ */
+public class MybatisGeneratorMain {
+
+	public static void main(String[] args)
+			throws SQLException, IOException, InterruptedException, InvalidConfigurationException, XMLParserException {
+		List<String> warnings = new ArrayList<String>();
+		boolean overwrite = true;
+		File configFile = new File(MybatisGeneratorMain.class.getResource("/mybatis-generator-config.xml").getFile());
+		ConfigurationParser cp = new ConfigurationParser(warnings);
+		Configuration config = cp.parseConfiguration(configFile);
+		DefaultShellCallback callback = new DefaultShellCallback(overwrite);
+		MyBatisGenerator myBatisGenerator = new MyBatisGenerator(config, callback, warnings);
+		myBatisGenerator.generate(null);
+		System.out.println("generate finish");
+	}
+}

+ 38 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/filter/AuthFilter.java

@@ -0,0 +1,38 @@
+package com.tzld.piaoquan.supply.demand.engine.filter;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.core.annotation.Order;
+
+import javax.servlet.*;
+import javax.servlet.annotation.WebFilter;
+import java.io.IOException;
+
+/**
+ * auth filter
+ * Not in use yet
+ *
+ * @author supeng
+ */
+//@Order(value = 2)
+//@WebFilter(filterName = "authFilter", urlPatterns = "/api")
+public class AuthFilter implements Filter {
+    private static final Logger LOGGER = LoggerFactory.getLogger(AuthFilter.class);
+
+    @Override
+    public void init(FilterConfig filterConfig) throws ServletException {
+        LOGGER.info("init");
+
+    }
+
+    @Override
+    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
+        // TODO: token validation
+        filterChain.doFilter(servletRequest, servletResponse);
+    }
+
+    @Override
+    public void destroy() {
+        LOGGER.info("destroy");
+    }
+}

+ 40 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/filter/LogTraceFilter.java

@@ -0,0 +1,40 @@
+package com.tzld.piaoquan.supply.demand.engine.filter;
+
+import com.tzld.piaoquan.supply.demand.engine.common.base.Constant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+import org.springframework.core.annotation.Order;
+
+import javax.servlet.*;
+import javax.servlet.annotation.WebFilter;
+import java.io.IOException;
+import java.util.UUID;
+
+/**
+ * 日志增加traceId
+ *
+ * @author supeng
+ */
+@Order(value = 1)
+@WebFilter(filterName = "LogTraceFilter", urlPatterns = "/*")
+public class LogTraceFilter implements Filter {
+    private static final Logger LOGGER = LoggerFactory.getLogger(LogTraceFilter.class);
+
+    @Override
+    public void init(FilterConfig filterConfig) {
+        LOGGER.info("init");
+    }
+
+    @Override
+    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
+        MDC.put(Constant.LOG_TRACE_ID, UUID.randomUUID().toString());
+        try {
+            filterChain.doFilter(servletRequest, servletResponse);
+        } finally {
+            // always clean up so pooled threads do not carry a stale trace id
+            MDC.remove(Constant.LOG_TRACE_ID);
+        }
+    }
+
+    @Override
+    public void destroy() {
+        LOGGER.info("destroy");
+    }
+}

+ 76 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/handle/GlobalExceptionHandle.java

@@ -0,0 +1,76 @@
+package com.tzld.piaoquan.supply.demand.engine.handle;
+
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+
+import com.tzld.piaoquan.supply.demand.engine.common.base.CommonResponse;
+import lombok.extern.slf4j.Slf4j;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.util.CollectionUtils;
+import org.springframework.validation.BindException;
+import org.springframework.validation.ObjectError;
+import org.springframework.web.bind.MethodArgumentNotValidException;
+import org.springframework.web.bind.annotation.ExceptionHandler;
+import org.springframework.web.bind.annotation.RestControllerAdvice;
+
+import com.tzld.piaoquan.supply.demand.engine.common.enums.ExceptionEnum;
+import com.tzld.piaoquan.supply.demand.engine.common.exception.CommonException;
+
+/**
+ * Global exception handler
+ * 
+ * @author supeng
+ */
+@Slf4j
+@RestControllerAdvice
+public class GlobalExceptionHandle {
+
+    @ExceptionHandler
+    public Object handleException(HttpServletRequest req, Exception exception) throws Exception {
+        String uri = req.getRequestURI();
+        CommonResponse<Object> response = new CommonResponse<Object>();
+        // Business exception
+        if (exception instanceof CommonException) {
+            CommonException e = (CommonException) exception;
+            response.setCode(e.getCode());
+            response.setMsg(e.getMsg());
+            log.warn("uri:" + uri + "\n" + "CustomException log.", exception);
+        } else if (exception instanceof MethodArgumentNotValidException) {
+            // Parameter validation exception
+            MethodArgumentNotValidException e = (MethodArgumentNotValidException) exception;
+            List<ObjectError> errorList = e.getBindingResult().getAllErrors();
+            StringBuilder errorMsg = new StringBuilder();
+            errorMsg.append("|");
+            if (!CollectionUtils.isEmpty(errorList)) {
+                for (ObjectError objectError : errorList) {
+                    errorMsg.append(objectError.getDefaultMessage()).append("|");
+                }
+            }
+            response.setCode(ExceptionEnum.PARAMS_INVALID.getCode());
+            response.setMsg(errorMsg.toString());
+            log.warn("uri:" + uri + "\n" + "MethodArgumentNotValidException log.", exception);
+        } else if (exception instanceof BindException) {
+            // Parameter binding exception
+            BindException e = (BindException) exception;
+            List<ObjectError> errorList = e.getBindingResult().getAllErrors();
+            StringBuilder errorMsg = new StringBuilder();
+            errorMsg.append("|");
+            if (!CollectionUtils.isEmpty(errorList)) {
+                for (ObjectError objectError : errorList) {
+                    errorMsg.append(objectError.getDefaultMessage()).append("|");
+                }
+            }
+            response.setCode(ExceptionEnum.PARAMS_INVALID.getCode());
+            response.setMsg(errorMsg.toString());
+            log.warn("uri:" + uri + "\n" + "BindException log.", exception);
+        } else {
+            response.setCode(ExceptionEnum.SYSTEM_ERROR.getCode());
+            response.setMsg(ExceptionEnum.SYSTEM_ERROR.getMsg());
+            log.error("uri:" + uri + "\n" + "unknow exception log.", exception);
+        }
+        return response;
+    }
+
+}
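
Any CommonException thrown from a controller or service is converted by this handler into a CommonResponse carrying the enum's code and message; validation and binding errors come back as PARAMS_INVALID with the concatenated default messages. A small illustrative service (not part of this commit) showing where such exceptions would originate:

    import com.tzld.piaoquan.supply.demand.engine.common.enums.ExceptionEnum;
    import com.tzld.piaoquan.supply.demand.engine.common.exception.CommonException;

    public class DemoLookupService {

        public String findName(Long id) {
            if (id == null) {
                // surfaces as a CommonResponse with code=2 and the PARAMS_INVALID message
                throw new CommonException(ExceptionEnum.PARAMS_INVALID);
            }
            // a real lookup would go here; the sketch always reports "not found"
            throw new CommonException(ExceptionEnum.DATA_NOT_EXIST, "id=" + id + " not found");
        }
    }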

+ 170 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/model/dto/BaseInfoDTO.java

@@ -0,0 +1,170 @@
+package com.tzld.piaoquan.supply.demand.engine.model.dto;
+
+import io.swagger.annotations.ApiModelProperty;
+
+/**
+ * Common base request info
+ *
+ * @author supeng
+ * @date 2020/08/28
+ */
+public class BaseInfoDTO {
+    /// User info
+    @ApiModelProperty(value = "公共参数-token值")
+    private String token;
+    @ApiModelProperty(value = "公共参数-登录用户ID")
+    private Long loginUid;
+
+    /// App info
+    @ApiModelProperty(value = "公共参数-应用版本号")
+    private Integer appVersionCode;
+    @ApiModelProperty(value = "公共参数-产品代号")
+    private Integer appType;
+
+    /// Device info
+    @ApiModelProperty(value = "公共参数-手机设备的唯一码")
+    private String machineCode;
+    @ApiModelProperty(value = "公共参数-ios,android")
+    private String platform;
+    @ApiModelProperty(value = "公共参数-系统版本(例:ios10.1)")
+    private String systemVersion;
+    @ApiModelProperty(value = "公共参数-手机信息")
+    private String machineInfo;
+    @ApiModelProperty(value = "公共参数-网络类型 WI-FI 5G 4G 3G 2G")
+    private String networkType;
+    @ApiModelProperty(value = "公共参数-客户端ip")
+    private String clientIp;
+
+    // pageSource-related parameters
+    @ApiModelProperty(value = "公共参数-页面来源")
+    private String pageSource;
+
+    // Per-operation parameters
+    @ApiModelProperty(value = "公共参数-前端请求时间")
+    private Long clientTimestamp;
+    @ApiModelProperty(value = "公共参数-sessionId")
+    private String sessionId;
+    @ApiModelProperty(value = "公共参数-requestId,每次请求客户端生成唯一ID,不超过64位")
+    private String requestId;
+
+    public String getToken() {
+        return token;
+    }
+
+    public void setToken(String token) {
+        this.token = token;
+    }
+
+    public Long getLoginUid() {
+        return loginUid;
+    }
+
+    public void setLoginUid(Long loginUid) {
+        this.loginUid = loginUid;
+    }
+
+    public Integer getAppVersionCode() {
+        return appVersionCode;
+    }
+
+    public void setAppVersionCode(Integer appVersionCode) {
+        this.appVersionCode = appVersionCode;
+    }
+
+    public Integer getAppType() {
+        return appType;
+    }
+
+    public void setAppType(Integer appType) {
+        this.appType = appType;
+    }
+
+    public String getMachineCode() {
+        return machineCode;
+    }
+
+    public void setMachineCode(String machineCode) {
+        this.machineCode = machineCode;
+    }
+
+    public String getPlatform() {
+        return platform;
+    }
+
+    public void setPlatform(String platform) {
+        this.platform = platform;
+    }
+
+    public String getSystemVersion() {
+        return systemVersion;
+    }
+
+    public void setSystemVersion(String systemVersion) {
+        this.systemVersion = systemVersion;
+    }
+
+    public String getMachineInfo() {
+        return machineInfo;
+    }
+
+    public void setMachineInfo(String machineInfo) {
+        this.machineInfo = machineInfo;
+    }
+
+    public String getNetworkType() {
+        return networkType;
+    }
+
+    public void setNetworkType(String networkType) {
+        this.networkType = networkType;
+    }
+
+    public String getClientIp() {
+        return clientIp;
+    }
+
+    public void setClientIp(String clientIp) {
+        this.clientIp = clientIp;
+    }
+
+    public String getPageSource() {
+        return pageSource;
+    }
+
+    public void setPageSource(String pageSource) {
+        this.pageSource = pageSource;
+    }
+
+    public Long getClientTimestamp() {
+        return clientTimestamp;
+    }
+
+    public void setClientTimestamp(Long clientTimestamp) {
+        this.clientTimestamp = clientTimestamp;
+    }
+
+    public String getSessionId() {
+        return sessionId;
+    }
+
+    public void setSessionId(String sessionId) {
+        this.sessionId = sessionId;
+    }
+
+    public String getRequestId() {
+        return requestId;
+    }
+
+    public void setRequestId(String requestId) {
+        this.requestId = requestId;
+    }
+
+    @Override
+    public String toString() {
+        return String.format(
+            "BaseInfoDTO [token=%s, appVersionCode=%s, appType=%s, machineCode=%s, platform=%s, systemVersion=%s, machineInfo=%s, networkType=%s, clientIp=%s, pageSource=%s, clientTimestamp=%s, sessionId=%s, requestId=%s]",
+            token, appVersionCode, appType, machineCode, platform, systemVersion, machineInfo, networkType, clientIp,
+            pageSource, clientTimestamp, sessionId, requestId);
+    }
+
+}

+ 27 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/model/dto/PageDTO.java

@@ -0,0 +1,27 @@
+package com.tzld.piaoquan.supply.demand.engine.model.dto;
+
+/**
+ * @author supeng
+ * @date 2020/09/01
+ */
+public class PageDTO {
+
+    private int pageNum;
+    private int pageSize;
+
+    public int getPageNum() {
+        return pageNum;
+    }
+
+    public void setPageNum(int pageNum) {
+        this.pageNum = pageNum;
+    }
+
+    public int getPageSize() {
+        return pageSize;
+    }
+
+    public void setPageSize(int pageSize) {
+        this.pageSize = pageSize;
+    }
+}

+ 8 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/service/LoghubService.java

@@ -0,0 +1,8 @@
+package com.tzld.piaoquan.supply.demand.engine.service;
+
+import java.util.Map;
+
+public interface LoghubService {
+    void addLog(String logProject, String logStore, String logTopic, Map<String, Object> map);
+    void addActionLog(Map<String, Object> map);
+}

+ 75 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/service/impl/LoghubServiceImpl.java

@@ -0,0 +1,75 @@
+package com.tzld.piaoquan.supply.demand.engine.service.impl;
+
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import com.tzld.commons.aliyun.log.AliyunLogManager;
+import com.tzld.piaoquan.supply.demand.engine.service.LoghubService;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import java.util.Map;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+@Slf4j
+@Service
+public class LoghubServiceImpl implements LoghubService {
+
+    @Value("${aliyun.log.project}")
+    private String project;
+    @Value("${aliyun.log.logstore.action}")
+    private String actionLogStore;
+
+    /**
+     * Thread pool queue capacity
+     */
+    private static final int QUEUE_MAX_SIZE = 100000;
+    /**
+     * Thread naming pattern
+     */
+    private static final ThreadFactory NAMED_THREAD_FACTORY = new ThreadFactoryBuilder().setNameFormat("loghub-service-pool-%d").build();
+
+    private static ThreadPoolExecutor executor;
+
+    @Autowired
+    private AliyunLogManager aliyunLogManager;
+
+
+    @PostConstruct
+    public void init() {
+        //init thread pool
+        executor = new ThreadPoolExecutor(64, 64,
+                0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(QUEUE_MAX_SIZE), NAMED_THREAD_FACTORY, new ThreadPoolExecutor.AbortPolicy());
+    }
+
+    @PreDestroy
+    public void destroy() {
+        //gracefully shutdown
+        executor.shutdown();
+    }
+
+    @Override
+    public void addLog(String logProject, String logStore, String logTopic, Map<String, Object> map) {
+        executor.execute(() -> submitLog(logProject, logStore, logTopic, map));
+    }
+
+    @Override
+    public void addActionLog(Map<String, Object> map) {
+        executor.execute(() -> submitLog(project, actionLogStore, "", map));
+    }
+
+    private void submitLog(String project, String logStore, String topic, Map<String, Object> data) {
+        try {
+            aliyunLogManager.sendLog(project, logStore, topic, data);
+        } catch (Exception e) {
+            log.error("调用阿里云logHub异常", e);
+        }
+    }
+
+
+}
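
addLog only hands the map off to the 64-thread pool, so callers return immediately and send failures are logged rather than propagated. A small caller sketch; the project, logstore and fields are placeholders:

    import com.tzld.piaoquan.supply.demand.engine.service.LoghubService;
    import org.springframework.stereotype.Component;

    import java.util.HashMap;
    import java.util.Map;

    @Component
    public class DemoLogReporter {

        private final LoghubService loghubService;

        public DemoLogReporter(LoghubService loghubService) {
            this.loghubService = loghubService;
        }

        public void report(Long uid, String event) {
            Map<String, Object> data = new HashMap<>(4);
            data.put("uid", uid);
            data.put("event", event);
            // queued onto the loghub pool; rejected with RejectedExecutionException if the 100000-entry queue is full
            loghubService.addLog("demo-project", "demo-logstore", "demo-topic", data);
        }
    }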

+ 261 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/util/DateUtil.java

@@ -0,0 +1,261 @@
+package com.tzld.piaoquan.supply.demand.engine.util;
+
+import java.time.*;
+import java.time.format.DateTimeFormatter;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * date util
+ *
+ * @author supeng
+ */
+public final class DateUtil {
+
+    public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
+
+    public static final String YYYY_MM_DD = "yyyy-MM-dd";
+
+    public static final String HH_MM_SS = "HH:mm:ss";
+
+    private static final DateTimeFormatter DEFAULT_DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern(YYYY_MM_DD_HH_MM_SS);
+
+    private static final DateTimeFormatter DEFAULT_DATE_FORMATTER = DateTimeFormatter.ofPattern(YYYY_MM_DD);
+
+    private static final DateTimeFormatter DEFAULT_TIME_FORMATTER = DateTimeFormatter.ofPattern(HH_MM_SS);
+
+    private static final Map<String, DateTimeFormatter> PATTEN_FORMATTER_MAPPER = new HashMap<>();
+
+    static {
+        PATTEN_FORMATTER_MAPPER.put(YYYY_MM_DD_HH_MM_SS, DEFAULT_DATE_TIME_FORMATTER);
+        PATTEN_FORMATTER_MAPPER.put(YYYY_MM_DD, DEFAULT_DATE_FORMATTER);
+        PATTEN_FORMATTER_MAPPER.put(HH_MM_SS, DEFAULT_TIME_FORMATTER);
+    }
+
+    private static DateTimeFormatter cacheFormatterAndGet(String patten) {
+        // DateTimeFormatter is immutable and thread-safe, so cached instances can be shared;
+        // computeIfAbsent keeps the cache consistent under concurrent access
+        return PATTEN_FORMATTER_MAPPER.computeIfAbsent(patten, DateTimeFormatter::ofPattern);
+    }
+
+    /**
+     * @param localDateTime date time
+     * @return yyyy-MM-dd HH:mm:ss
+     */
+    public static String formatLocalDateTime(LocalDateTime localDateTime) {
+        return localDateTime.format(DEFAULT_DATE_TIME_FORMATTER);
+    }
+
+    /**
+     * @param localDateTime time
+     * @param patten        yyyy-MM-dd HH:mm:ss
+     * @return yyyy-MM-dd HH:mm:ss
+     */
+    public static String formatLocalDateTime(LocalDateTime localDateTime, String patten) {
+        DateTimeFormatter dateTimeFormatter = cacheFormatterAndGet(patten);
+        return localDateTime.format(dateTimeFormatter);
+    }
+
+    /**
+     * @param localDate date
+     * @param patten    only date patten
+     * @return yyyy-MM-dd
+     */
+    public static String formatLocalDate(LocalDate localDate, String patten) {
+        DateTimeFormatter dateTimeFormatter = cacheFormatterAndGet(patten);
+        return localDate.format(dateTimeFormatter);
+    }
+
+    /**
+     * @param localDate localDate
+     * @return yyyy-MM-dd
+     */
+    public static String formatLocalDate(LocalDate localDate) {
+        return localDate.format(DEFAULT_DATE_FORMATTER);
+    }
+
+    /**
+     * @param localTime localTime
+     * @param patten    patten
+     * @return HH:mm:ss
+     */
+    public static String formatLocalTime(LocalTime localTime, String patten) {
+        DateTimeFormatter dateTimeFormatter = cacheFormatterAndGet(patten);
+        return localTime.format(dateTimeFormatter);
+    }
+
+    /**
+     * @param localTime localTime
+     * @return HH:mm:ss
+     */
+    public static String formatLocalTime(LocalTime localTime) {
+        return localTime.format(DEFAULT_TIME_FORMATTER);
+    }
+
+    /**
+     * @param date   date time
+     * @param patten patten
+     * @return yyyy-MM-dd HH:mm:ss
+     */
+    public static String format(Date date, String patten) {
+        Instant instant = date.toInstant();
+        LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.systemDefault());
+        return localDateTime.format(cacheFormatterAndGet(patten));
+    }
+
+    /**
+     * @param date date
+     * @return yyyy-MM-dd HH:mm:ss
+     */
+    public static String format(Date date) {
+        return format(date, YYYY_MM_DD_HH_MM_SS);
+    }
+
+    /**
+     * @param date date
+     * @return yyyy-MM-dd
+     */
+    public static String formatDate(Date date) {
+        return format(date, YYYY_MM_DD);
+    }
+
+    /**
+     * @param date date
+     * @return HH:mm:ss
+     */
+    public static String formatTime(Date date) {
+        return format(date, HH_MM_SS);
+    }
+
+    /**
+     * @param mills mills
+     * @return yyyy-MM-dd HH:mm:ss
+     */
+    public static String formatMills(long mills, String patten) {
+        Instant instant = Instant.ofEpochMilli(mills);
+        LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.systemDefault());
+        return formatLocalDateTime(localDateTime, patten);
+    }
+
+    /**
+     * @param mills mills
+     * @return yyyy-MM-dd HH:mm:ss
+     */
+    public static String formatMills(long mills) {
+        return formatMills(mills, YYYY_MM_DD_HH_MM_SS);
+    }
+
+    /**
+     * @param mills mills
+     * @return yyyy-MM-dd
+     */
+    public static String formatMillsDate(long mills) {
+        return formatMills(mills, YYYY_MM_DD);
+    }
+
+    /**
+     * @param date date
+     * @return HH:mm:ss
+     */
+    public static String formatMillsTime(long date) {
+        return formatMills(date, HH_MM_SS);
+    }
+
+    /**
+     * @param date yyyy-MM-dd HH:mm:ss
+     * @return Date
+     */
+    public static Date parse(String date) {
+        LocalDateTime localDateTime = parseToLocalDateTime(date);
+        // Use the zone rules of the parsed date itself so DST transitions do not shift the result
+        Instant instant = localDateTime.atZone(ZoneId.systemDefault()).toInstant();
+        return Date.from(instant);
+    }
+
+    /**
+     * @param date   string date
+     * @param patten formatter patten
+     * @return LocalDateTime
+     */
+    public static LocalDateTime parseToLocalDateTime(String date, String patten) {
+        return LocalDateTime.parse(date, DateTimeFormatter.ofPattern(patten));
+    }
+
+    /**
+     * @param date yyyy-MM-dd HH:mm:ss
+     * @return LocalDateTime
+     */
+    public static LocalDateTime parseToLocalDateTime(String date) {
+        return LocalDateTime.parse(date, DEFAULT_DATE_TIME_FORMATTER);
+    }
+
+    /**
+     * @param date yyyy-MM-dd
+     * @return LocalDate
+     */
+    public static LocalDate parseToLocalDate(String date) {
+        return LocalDate.parse(date, DEFAULT_DATE_FORMATTER);
+    }
+
+    /**
+     * @param date HH:mm:ss
+     * @return LocalTime
+     */
+    public static LocalTime parseToLocalTime(String date) {
+        return LocalTime.parse(date, DEFAULT_TIME_FORMATTER);
+    }
+
+    /**
+     * @param dayStart date
+     * @param dayEnd   date
+     * @return
+     */
+    public static Period betweenDays(Date dayStart, Date dayEnd) {
+        LocalDateTime localDateTimeStart = LocalDateTime.ofInstant(dayStart.toInstant(), ZoneId.systemDefault());
+        LocalDateTime localDateTimeEnd = LocalDateTime.ofInstant(dayEnd.toInstant(), ZoneId.systemDefault());
+        return Period.between(localDateTimeStart.toLocalDate(), localDateTimeEnd.toLocalDate());
+    }
+
+    /**
+     * @param dayStart date
+     * @param dayEnd   date
+     * @return
+     */
+    public static Duration betweenTimes(Date dayStart, Date dayEnd) {
+        LocalDateTime localDateTimeStart = LocalDateTime.ofInstant(dayStart.toInstant(), ZoneId.systemDefault());
+        LocalDateTime localDateTimeEnd = LocalDateTime.ofInstant(dayEnd.toInstant(), ZoneId.systemDefault());
+        return Duration.between(localDateTimeStart, localDateTimeEnd);
+    }
+
+    /**
+     * @param dayStart date
+     * @param dayEnd   date
+     * @return
+     */
+    public static Period betweenDays(String dayStart, String dayEnd) {
+        return Period.between(parseToLocalDate(dayStart), parseToLocalDate(dayEnd));
+    }
+
+    /**
+     * @param dayStart date
+     * @param dayEnd   date
+     * @return
+     */
+    public static Duration betweenTimes(String dayStart, String dayEnd) {
+        return Duration.between(parseToLocalDateTime(dayStart), parseToLocalDateTime(dayEnd));
+    }
+
+    public static void main(String[] args) {
+//        LocalDateTime localDateTime = parseToLocalDateTime("2021-11-26","yyyy-MM-dd");
+        LocalDate localDate = parseToLocalDate("2021-11-26");
+//        System.out.println(formatLocalDateTime(localDateTime));
+        LocalDateTime startDateTime = localDate.atTime(0, 0, 0);
+        LocalDateTime endDateTime = localDate.plusDays(1L).atTime(0, 0, 0);
+        System.out.println(formatLocalDateTime(startDateTime));
+        System.out.println(formatLocalDateTime(endDateTime));
+//        System.out.println(formatLocalDate(localDate));
+    }
+}
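
A short usage sketch of the formatters and the between* helpers (values are illustrative; assumes the same package or an import of DateUtil):

    import java.time.Duration;
    import java.time.LocalDate;
    import java.time.Period;
    import java.util.Date;

    public class DateUtilExample { // illustrative only
        public static void main(String[] args) {
            // Round-trip with the default yyyy-MM-dd HH:mm:ss pattern
            Date now = new Date();
            String text = DateUtil.format(now);
            Date back = DateUtil.parse(text);
            System.out.println(text + " -> " + DateUtil.format(back));

            // Whole-day difference between two dates
            Period p = DateUtil.betweenDays("2021-11-20", "2021-11-26");
            System.out.println(p.getDays());   // 6

            // Duration between two timestamps
            Duration d = DateUtil.betweenTimes("2021-11-26 10:00:00", "2021-11-26 12:30:00");
            System.out.println(d.toMinutes()); // 150

            // Day boundaries, as in the scratch main above
            LocalDate day = DateUtil.parseToLocalDate("2021-11-26");
            System.out.println(DateUtil.formatLocalDateTime(day.atStartOfDay()));
        }
    }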

+ 41 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/util/IpUtil.java

@@ -0,0 +1,41 @@
+package com.tzld.piaoquan.supply.demand.engine.util;
+
+import javax.servlet.http.HttpServletRequest;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+public class IpUtil {
+    public static String getIpAddr(HttpServletRequest request) {
+        String ipAddress;
+        ipAddress = request.getHeader("x-forwarded-for");
+        if (ipAddress == null || ipAddress.length() == 0
+                || "unknown".equalsIgnoreCase(ipAddress)) {
+            ipAddress = request.getHeader("Proxy-Client-IP");
+        }
+        if (ipAddress == null || ipAddress.length() == 0
+                || "unknown".equalsIgnoreCase(ipAddress)) {
+            ipAddress = request.getHeader("WL-Proxy-Client-IP");
+        }
+        if (ipAddress == null || ipAddress.length() == 0
+                || "unknown".equalsIgnoreCase(ipAddress)) {
+            ipAddress = request.getRemoteAddr();
+            if ("127.0.0.1".equals(ipAddress)) {
+                // Fall back to the IP configured on the local network interface
+                InetAddress inet;
+                try {
+                    inet = InetAddress.getLocalHost();
+                    ipAddress = inet.getHostAddress();
+                } catch (UnknownHostException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        // When the request passed through multiple proxies, the first IP is the real client IP; multiple IPs are separated by ','
+        if (ipAddress != null && ipAddress.length() > 15) { // "***.***.***.***".length()
+            if (ipAddress.indexOf(",") > 0) {
+                ipAddress = ipAddress.substring(0, ipAddress.indexOf(","));
+            }
+        }
+        return ipAddress;
+    }
+}
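
A small sketch of how a controller might use it (the controller and mapping below are hypothetical); behind a proxy or SLB the x-forwarded-for header wins, otherwise the remote address is used:

    import com.tzld.piaoquan.supply.demand.engine.util.IpUtil;
    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.RestController;

    import javax.servlet.http.HttpServletRequest;

    @RestController
    public class EchoIpController { // hypothetical, for illustration only
        @GetMapping("/echo/ip")
        public String echoIp(HttpServletRequest request) {
            // Resolves the client IP, preferring x-forwarded-for when behind a proxy
            return IpUtil.getIpAddr(request);
        }
    }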

+ 67 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/util/ODPSManager.java

@@ -0,0 +1,67 @@
+package com.tzld.piaoquan.supply.demand.engine.util;
+
+import com.aliyun.odps.Instance;
+import com.aliyun.odps.Odps;
+import com.aliyun.odps.OdpsException;
+import com.aliyun.odps.account.Account;
+import com.aliyun.odps.account.AliyunAccount;
+import com.aliyun.odps.data.Record;
+import com.aliyun.odps.task.SQLTask;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Component;
+
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+@Slf4j
+@Component
+public class ODPSManager {
+    private final static String ACCESSID = "LTAIWYUujJAm7CbH";
+    private final static String ACCESSKEY = "RfSjdiWwED1sGFlsjXv0DlfTnZTG1P";
+    private final static String ENDPOINT = "http://service.cn.maxcompute.aliyun.com/api";
+
+    public List<Record> query(String sql) {
+        Account account = new AliyunAccount(ACCESSID, ACCESSKEY);
+        Odps odps = new Odps(account);
+        odps.setEndpoint(ENDPOINT);
+        odps.setDefaultProject("loghubods");
+        Instance i;
+        try {
+            i = SQLTask.run(odps, sql);
+            i.waitForSuccess();
+            List<Record> records = SQLTask.getResultByInstanceTunnel(i);
+            if (Objects.nonNull(records) && records.size() != 0) {
+                return records;
+            }
+        } catch (Exception e) {
+            log.error("odps query error", e);
+        }
+        return Collections.emptyList();
+    }
+
+    public static void main(String[] args) {
+        Calendar cal = Calendar.getInstance();
+        cal.setTime(new Date());
+        cal.add(Calendar.DATE, -1);
+        Date date2 = cal.getTime();
+        SimpleDateFormat format2 = new SimpleDateFormat("yyyyMMdd");
+
+        Account account = new AliyunAccount(ACCESSID, ACCESSKEY);
+        Odps odps = new Odps(account);
+        odps.setEndpoint(ENDPOINT);
+        odps.setDefaultProject("loghubods");
+        String sql = "select * from yesterday_return_top1000 where dt='" + format2.format(date2) + "';";
+        Instance i;
+        try {
+            i = SQLTask.run(odps, sql);
+            i.waitForSuccess();
+            List<Record> records = SQLTask.getResult(i);
+            for (Record r : records) {
+                System.out.println(Integer.parseInt(r.get(0).toString()));
+                System.out.println(r.get(1).toString());
+            }
+        } catch (OdpsException e) {
+            e.printStackTrace();
+        }
+    }
+}
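
A sketch of a caller consuming the query helper (the bean and the column handling below are assumptions for illustration; the real table schema is not shown in this diff):

    import com.aliyun.odps.data.Record;
    import lombok.extern.slf4j.Slf4j;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.stereotype.Component;

    import java.util.List;

    @Slf4j
    @Component
    public class TopReturnVideoLoader { // hypothetical caller
        @Autowired
        private ODPSManager odpsManager;

        public void load(String dt) {
            List<Record> records = odpsManager.query(
                    "select * from loghubods.yesterday_return_top1000 where dt='" + dt + "';");
            for (Record r : records) {
                // Column order mirrors the scratch main above; adjust to the real schema
                long videoId = Long.parseLong(r.get(0).toString());
                String secondColumn = r.get(1).toString();
                log.info("videoId={}, col1={}", videoId, secondColumn);
            }
        }
    }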

+ 930 - 0
supply-demand-engine-core/src/main/java/com/tzld/piaoquan/supply/demand/engine/util/RedisUtil.java

@@ -0,0 +1,930 @@
+package com.tzld.piaoquan.supply.demand.engine.util;
+
+import org.apache.ibatis.cache.CacheException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.data.redis.core.Cursor;
+import org.springframework.data.redis.core.RedisCallback;
+import org.springframework.data.redis.core.RedisTemplate;
+import org.springframework.data.redis.core.ScanOptions;
+import org.springframework.data.redis.core.ZSetOperations.TypedTuple;
+import org.springframework.data.redis.core.script.DefaultRedisScript;
+import org.springframework.scripting.support.StaticScriptSource;
+import org.springframework.stereotype.Component;
+
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Redis operation helpers
+ *
+ * @author supeng
+ * @date 2020/11/10
+ */
+@Component
+public class RedisUtil {
+    /**
+     * Lock key prefix
+     */
+    private static final String LOCK_PREFIX = "LOCK_";
+    /**
+     * Default number of acquire attempts
+     */
+    private static final Integer DEFAULT_RETRIES = 1;
+    /**
+     * Default retry interval: 10 milliseconds
+     */
+    private static final Long DEFAULT_INTERVAL = 10L;
+
+    /**
+     * Lua script for acquiring a re-entrant lock: the lock is a hash whose field is the caller's
+     * reentrantId and whose value counts the re-entries; the key TTL is refreshed on each acquire
+     */
+    private static final String LOCK = "if (redis.call('exists', KEYS[1]) == 0) then " +
+            "redis.call('hset', KEYS[1], ARGV[2], 1); " +
+            "redis.call('pexpire', KEYS[1], ARGV[1]); " +
+            "return 1; " +
+            "end; " +
+            "if (redis.call('hexists', KEYS[1], ARGV[2]) == 1) then " +
+            "redis.call('hincrby', KEYS[1], ARGV[2], 1); " +
+            "redis.call('pexpire', KEYS[1], ARGV[1]); " +
+            "return 1; " +
+            "end; " +
+            "return 0;";
+
+    /**
+     * Lua script for releasing a re-entrant lock: decrements the re-entry counter and deletes the key
+     * once it reaches zero; returns nil when the caller does not hold the lock
+     */
+    public static final String UNLOCK = "if (redis.call('hexists', KEYS[1], ARGV[1]) == 0) then " +
+            "return nil; " +
+            "end; " +
+            "local counter = redis.call('hincrby', KEYS[1], ARGV[1], -1); " +
+            "if (counter > 0) then " +
+            "return 0; " +
+            "else " +
+            "redis.call('del', KEYS[1]); " +
+            "return 1; " +
+            "end; " +
+            "return nil;";
+
+    @Qualifier("redisTemplate")
+    @Autowired
+    RedisTemplate<String, String> redisTemplate;
+
+    /** ================ Key operations ================ */
+
+    /**
+     * Whether the key exists
+     *
+     * @param key
+     * @return
+     */
+    public Boolean hasKey(String key) {
+        return redisTemplate.hasKey(key);
+    }
+
+    /**
+     * Delete a key
+     *
+     * @param key
+     * @return
+     */
+    public Boolean deleteKey(String key) {
+        return redisTemplate.delete(key);
+    }
+
+    /**
+     * Delete multiple keys
+     *
+     * @param keys
+     * @return
+     */
+    public Long deleteKeys(Collection<String> keys) {
+        return redisTemplate.delete(keys);
+    }
+
+    /**
+     * Set the expiration time of a key, in seconds
+     *
+     * @param key
+     * @param timeout
+     * @return
+     */
+    public Boolean expire(String key, long timeout) {
+        if (timeout < 0) {
+            return false;
+        }
+        return redisTemplate.expire(key, timeout, TimeUnit.SECONDS);
+    }
+
+    /**
+     * Get the remaining time to live of a key, in seconds
+     *
+     * @param key
+     * @return -2 if the key does not exist, -1 if the key exists but has no expiration
+     */
+    public Long getExpire(String key) {
+        return redisTemplate.getExpire(key, TimeUnit.SECONDS);
+    }
+
+    /** ================ String operations ================ */
+
+    /**
+     * Get the value of a key
+     *
+     * @param key
+     * @return
+     */
+    public String get(String key) {
+        return redisTemplate.opsForValue().get(key);
+    }
+
+    /**
+     * Get the values of multiple keys
+     *
+     * @param keys
+     * @return
+     */
+    public List<String> multiGet(Collection<String> keys) {
+        if (keys != null && keys.size() > 200) {
+            throw new CacheException("too many keys, max 200");
+        }
+        return redisTemplate.opsForValue().multiGet(keys);
+    }
+
+    /**
+     * set 值
+     *
+     * @param key
+     * @param value
+     */
+    public void set(String key, String value) {
+        redisTemplate.opsForValue().set(key, value);
+    }
+
+    /**
+     * Set the value of a key with an expiration, in seconds
+     *
+     * @param key
+     * @param value
+     * @param timeout
+     */
+    public void set(String key, String value, long timeout) {
+        redisTemplate.opsForValue().set(key, value, timeout, TimeUnit.SECONDS);
+    }
+
+    /**
+     * Set the value only if the key does not exist (SETNX)
+     *
+     * @param key
+     * @param value
+     * @return
+     */
+    public Boolean setNx(String key, String value) {
+        return redisTemplate.opsForValue().setIfAbsent(key, value);
+    }
+
+    /**
+     * Set the value with an expiration only if the key does not exist (SETNX)
+     *
+     * @param key
+     * @param value
+     * @param timeout
+     * @return
+     */
+    public Boolean setNx(String key, String value, long timeout) {
+        return redisTemplate.opsForValue().setIfAbsent(key, value, timeout, TimeUnit.SECONDS);
+    }
+
+    /**
+     * getAndSet 值
+     *
+     * @param key
+     * @param value
+     * @return
+     */
+    public String getAndSet(String key, String value) {
+        return redisTemplate.opsForValue().getAndSet(key, value);
+    }
+
+    /**
+     * Increment the value of a key by 1
+     *
+     * @param key
+     * @return
+     * @version 1.0
+     */
+    public Long incr(String key) {
+        return redisTemplate.opsForValue().increment(key);
+    }
+
+    /**
+     * Increment the value of a key by delta
+     *
+     * @param key
+     * @param delta
+     * @return
+     * @version 1.0
+     */
+    public Long incrBy(String key, long delta) {
+        return redisTemplate.opsForValue().increment(key, delta);
+    }
+
+    /**
+     * Decrement the value of a key by 1
+     *
+     * @param key
+     * @return
+     * @version 1.0
+     */
+    public Long decr(String key) {
+        return redisTemplate.opsForValue().decrement(key);
+    }
+
+    /**
+     * Decrement the value of a key by delta
+     *
+     * @param key
+     * @param delta
+     * @return
+     * @version 1.0
+     */
+    public Long desrBy(String key, long delta) {
+        return redisTemplate.opsForValue().decrement(key, delta);
+    }
+
+    /** ================ Hash operations ================ */
+
+    /**
+     * Whether the hash field exists
+     *
+     * @param key
+     * @param hashKey
+     * @return
+     */
+    public boolean hasHashKey(String key, String hashKey) {
+        return redisTemplate.opsForHash().hasKey(key, hashKey);
+    }
+
+    /**
+     * Get the value of a hash field
+     *
+     * @param key
+     * @param hashKey
+     * @return
+     */
+    public Object hget(String key, String hashKey) {
+        return redisTemplate.opsForHash().get(key, hashKey);
+    }
+
+    /**
+     * Get all fields and values of a hash (do not use when the hash has more than 200 fields)
+     *
+     * @param key
+     * @return
+     */
+    public Map<Object, Object> hgetAll(String key) {
+        return redisTemplate.opsForHash().entries(key);
+    }
+
+    /**
+     * Get the values of multiple hash fields
+     *
+     * @param key
+     * @param hashKeys
+     * @return
+     */
+    public List<Object> hmget(String key, List<Object> hashKeys) {
+        return redisTemplate.opsForHash().multiGet(key, hashKeys);
+    }
+
+    /**
+     * @param key
+     * @param map
+     * @return
+     */
+    public boolean hmset(String key, Map<String, Object> map) {
+        try {
+            redisTemplate.opsForHash().putAll(key, map);
+            return true;
+        } catch (Exception e) {
+            e.printStackTrace();
+            return false;
+        }
+    }
+
+    /**
+     * Put all entries and set an expiration on the key; atomicity is not guaranteed
+     *
+     * @param key
+     * @param map
+     * @param expire
+     * @return
+     */
+    public boolean hmset(String key, Map<String, Object> map, long expire) {
+        try {
+            redisTemplate.opsForHash().putAll(key, map);
+            expire(key, expire);
+            return true;
+        } catch (Exception e) {
+            e.printStackTrace();
+            return false;
+        }
+    }
+
+    /**
+     * Set a hash field; the field is created if it does not exist
+     *
+     * @param key
+     * @param hashKey
+     * @param value
+     */
+    public void hset(String key, String hashKey, String value) {
+        redisTemplate.opsForHash().put(key, hashKey, value);
+    }
+
+    /**
+     * Set a hash field and set the expiration of the key; the field is created if it does not exist
+     *
+     * @param key
+     * @param hashKey
+     * @param value
+     * @param timeout expiration in seconds
+     */
+    public void hset(String key, String hashKey, String value, long timeout) {
+        redisTemplate.opsForHash().put(key, hashKey, value);
+        expire(key, timeout);
+    }
+
+    /**
+     * Set a hash field only if the field does not already exist
+     *
+     * @param key
+     * @param hashKey
+     * @param value
+     * @return
+     */
+    public Boolean hsetNx(String key, String hashKey, String value) {
+        return redisTemplate.opsForHash().putIfAbsent(key, hashKey, value);
+    }
+
+    /**
+     * Delete one or more hash fields
+     *
+     * @param key
+     * @param hashKey
+     */
+    public void hdel(String key, Object... hashKey) {
+        redisTemplate.opsForHash().delete(key, hashKey);
+    }
+
+    /**
+     * Increment a hash field by delta
+     *
+     * @param key
+     * @param hashKey
+     * @param delta
+     * @return
+     */
+    public Long hincrBy(String key, String hashKey, long delta) {
+        return redisTemplate.opsForHash().increment(key, hashKey, delta);
+    }
+
+    /** ================ List operations ================ */
+
+    /**
+     * Return the elements in the given range of the list; 0 is the first element, -1 the last, -2 the second to last, and so on.
+     *
+     * @param key
+     * @param start
+     * @param end
+     * @return
+     */
+    public List<String> lrange(String key, long start, long end) {
+        return redisTemplate.opsForList().range(key, start, end);
+    }
+
+    /**
+     * Get the length of the list
+     *
+     * @param key
+     * @return
+     */
+    public Long llen(String key) {
+        return redisTemplate.opsForList().size(key);
+    }
+
+    /**
+     * Get the element at the given index of the list
+     *
+     * @param key
+     * @param index index >= 0: 0 is the head, 1 the second element, and so on; index < 0: -1 is the tail, -2 the second to last, and so on
+     * @return
+     */
+    public String lindex(String key, long index) {
+        return redisTemplate.opsForList().index(key, index);
+    }
+
+    /**
+     * Append an element to the tail of the list
+     *
+     * @param key
+     * @param value
+     * @return
+     */
+    public Long rpush(String key, String value) {
+        return redisTemplate.opsForList().rightPush(key, value);
+    }
+
+    /**
+     * Append multiple elements to the tail of the list
+     *
+     * @param key
+     * @param values
+     * @return
+     */
+    public Long rpushAll(String key, Collection<String> values) {
+        return redisTemplate.opsForList().rightPushAll(key, values);
+    }
+
+    /**
+     * Remove and return the tail element of the list
+     *
+     * @param key
+     * @return
+     */
+    public String rpop(String key) {
+        return redisTemplate.opsForList().rightPop(key);
+    }
+
+    /**
+     * 向list的头部插入指定的元素
+     *
+     * @param key
+     * @param value
+     * @return
+     */
+    public Long lpush(String key, String value) {
+        return redisTemplate.opsForList().leftPush(key, value);
+    }
+
+    /**
+     * Prepend multiple elements to the head of the list
+     *
+     * @param key
+     * @param values
+     * @return
+     */
+    public Long lpushAll(String key, Collection<String> values) {
+        return redisTemplate.opsForList().leftPushAll(key, values);
+    }
+
+    /**
+     * Remove and return the head element of the list
+     *
+     * @param key
+     * @return
+     */
+    public String lpop(String key) {
+        return redisTemplate.opsForList().leftPop(key);
+    }
+
+    /**
+     * Set the element at the given index of the list
+     *
+     * @param key
+     * @param index
+     * @param value
+     */
+    public void lset(String key, long index, String value) {
+        redisTemplate.opsForList().set(key, index, value);
+    }
+
+    /**
+     * Remove the first count occurrences of elements equal to value from the list
+     *
+     * @param key
+     * @param count count > 0: remove elements equal to value moving from head to tail.<br/>
+     *              count < 0: remove elements equal to value moving from tail to head.<br/>
+     *              count = 0: remove all elements equal to value.
+     * @param value
+     * @return
+     */
+    public Long lrem(String key, long count, String value) {
+        return redisTemplate.opsForList().remove(key, count, value);
+    }
+
+    /**
+     * Pop multiple elements from the head of the list (pipelined LRANGE + LTRIM)
+     *
+     * @param key
+     * @param count
+     * @return
+     */
+    public List<Object> lpopMutil(String key, long count) {
+        List<Object> result = redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
+            connection.lRange(key.getBytes(), 0, count - 1);
+            connection.lTrim(key.getBytes(), count, -1);
+            return null;
+        });
+        return result;
+    }
+
+    /**
+     * Pop the elements from the given index to the tail of the list (pipelined LRANGE + LTRIM)
+     *
+     * @param key
+     * @param start start index; elements from start to the tail (-1) are returned and removed
+     * @return
+     */
+    public List<Object> rpopMutil(String key, long start) {
+        List<Object> result = redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
+            connection.lRange(key.getBytes(), start, -1);
+            connection.lTrim(key.getBytes(), 0, start - 1);
+            return null;
+        });
+        return result;
+    }
+
+    /**
+     * Pop multiple elements from the tail of the list (pipelined LRANGE + LTRIM)
+     *
+     * @param key
+     * @param count number of elements to pop
+     * @return
+     */
+    public List<Object> rpopMutil(String key, int count) {
+        List<Object> result = redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
+            connection.lRange(key.getBytes(), 0 - count, -1);
+            connection.lTrim(key.getBytes(), 0, -1 - count);
+            return null;
+        });
+        return result;
+    }
+
+    /** ================ Set operations ================ */
+
+    /**
+     * Whether the set contains the given value
+     *
+     * @param key
+     * @param value
+     * @return
+     */
+    public boolean sIsMember(String key, String value) {
+        return redisTemplate.opsForSet().isMember(key, value);
+    }
+
+    /**
+     * Return all members of the set; fetched via SCAN instead of SMEMBERS
+     *
+     * @param key
+     * @return
+     */
+    public Set<String> sscan(String key) {
+        Set<String> result = new HashSet<>();
+        Cursor<String> cursor = redisTemplate.opsForSet().scan(key, ScanOptions.NONE);
+        while (cursor != null && cursor.hasNext()) {
+            result.add(cursor.next());
+        }
+        return result;
+    }
+
+    /**
+     * Add one or more members to the set
+     *
+     * @param key
+     * @param values
+     * @return
+     */
+    public Long sadd(String key, String... values) {
+        return redisTemplate.opsForSet().add(key, values);
+    }
+
+    /**
+     * Return the number of members in the set
+     *
+     * @param key
+     * @return
+     */
+    public Long scard(String key) {
+        return redisTemplate.opsForSet().size(key);
+    }
+
+    /**
+     * Remove the given members from the set
+     *
+     * @param key
+     * @param values
+     * @return
+     */
+    public Long srem(String key, Object... values) {
+        return redisTemplate.opsForSet().remove(key, values);
+    }
+
+    /** ================ Sorted set (zset) operations ================ */
+
+    /**
+     * Add a member to the zset, or update the score of an existing member
+     *
+     * @param key
+     * @param value
+     * @param score
+     */
+    public void zadd(String key, String value, double score) {
+        redisTemplate.opsForZSet().add(key, value, score);
+    }
+
+    /**
+     * Add a member to the zset, or update the score of an existing member, and refresh the key expiration
+     *
+     * @param key
+     * @param value
+     * @param score
+     * @param timeout expiration in seconds
+     */
+    public void zadd(String key, String value, double score, long timeout) {
+        redisTemplate.opsForZSet().add(key, value, score);
+        expire(key, timeout);
+    }
+
+    /**
+     * Get the score of a member
+     *
+     * @param key
+     * @param value
+     * @return
+     */
+    public Double zscore(String key, String value) {
+        return redisTemplate.opsForZSet().score(key, value);
+    }
+
+    /**
+     * Get the number of members in the zset
+     *
+     * @param key
+     * @return
+     */
+    public Long zcard(String key) {
+        return redisTemplate.opsForZSet().zCard(key);
+    }
+
+    /**
+     * Count the members with scores within the given range
+     *
+     * @param key
+     * @param min
+     * @param max
+     * @return
+     */
+    public Long zcount(String key, double min, double max) {
+        return redisTemplate.opsForZSet().count(key, min, max);
+    }
+
+    /**
+     * Return the members in the given index range, ordered by score ascending
+     *
+     * @param key
+     * @param start
+     * @param end
+     * @return
+     */
+    public Set<String> zrange(String key, long start, long end) {
+        return redisTemplate.opsForZSet().range(key, start, end);
+    }
+
+    /**
+     * Return the members in the given index range, ordered by score descending
+     *
+     * @param key
+     * @param start
+     * @param end
+     * @return
+     */
+    public Set<String> zreverseRange(String key, long start, long end) {
+        return redisTemplate.opsForZSet().reverseRange(key, start, end);
+    }
+
+    /**
+     * Return the members and scores in the given index range
+     *
+     * @param key
+     * @param start
+     * @param end
+     * @return
+     */
+    public List<Map<String, Double>> zrangeWithScores(String key, long start, long end) {
+        List<Map<String, Double>> result = new ArrayList<>();
+        Set<TypedTuple<String>> cursor = redisTemplate.opsForZSet().rangeWithScores(key, start, end);
+        for (Iterator<TypedTuple<String>> it = cursor.iterator(); it.hasNext(); ) {
+            TypedTuple<String> item = it.next();
+            Map<String, Double> map = new HashMap<>();
+            map.put(item.getValue(), item.getScore());
+            result.add(map);
+        }
+        return result;
+    }
+
+    /**
+     * Return the members and scores in the given index range, as a list of value/score maps
+     *
+     * @param key
+     * @param start
+     * @param end
+     * @return
+     */
+    public List<Map<String, Object>> zrangeWithScoresToList(String key, long start, long end) {
+        List<Map<String, Object>> result = new ArrayList<>();
+        Set<TypedTuple<String>> cursor = redisTemplate.opsForZSet().rangeWithScores(key, start, end);
+        for (Iterator<TypedTuple<String>> it = cursor.iterator(); it.hasNext(); ) {
+            TypedTuple<String> item = it.next();
+            Map<String, Object> map = new HashMap<>();
+            map.put("value", item.getValue());
+            map.put("score", item.getScore());
+            result.add(map);
+        }
+        return result;
+    }
+
+    /**
+     * Return the members and scores in the given index range, as a single map
+     *
+     * @param key
+     * @param start
+     * @param end
+     * @return map
+     */
+    public Map<String, Double> zrangeWithScoresToMap(String key, long start, long end) {
+        Map<String, Double> result = new HashMap<>();
+        Set<TypedTuple<String>> cursor = redisTemplate.opsForZSet().rangeWithScores(key, start, end);
+        for (Iterator<TypedTuple<String>> it = cursor.iterator(); it.hasNext(); ) {
+            TypedTuple<String> item = it.next();
+            result.put(item.getValue(), item.getScore());
+        }
+        return result;
+    }
+
+    /**
+     * Iterate over the zset members (values only)
+     *
+     * @param key
+     * @return
+     */
+    public Set<String> zscanValues(String key) {
+        Set<String> result = new HashSet<>();
+        Cursor<TypedTuple<String>> cursor = redisTemplate.opsForZSet().scan(key, ScanOptions.NONE);
+        while (cursor.hasNext()) {
+            TypedTuple<String> item = cursor.next();
+            result.add(item.getValue());
+        }
+        return result;
+    }
+
+    /**
+     * Iterate over the zset members with their scores
+     *
+     * @param key
+     * @return
+     */
+    public List<Map<String, Double>> zscan(String key) {
+        List<Map<String, Double>> result = new ArrayList<Map<String, Double>>();
+        Cursor<TypedTuple<String>> cursor = redisTemplate.opsForZSet().scan(key, ScanOptions.NONE);
+        while (cursor.hasNext()) {
+            TypedTuple<String> item = cursor.next();
+            Map<String, Double> map = new HashMap<String, Double>();
+            map.put(item.getValue(), item.getScore());
+            result.add(map);
+        }
+        return result;
+
+    }
+
+    /**
+     * Remove the given members from the zset
+     *
+     * @param key
+     * @param values
+     */
+    public void zRemove(String key, Object... values) {
+        redisTemplate.opsForZSet().remove(key, values);
+    }
+
+    /**
+     * Remove the members in the given index range
+     *
+     * @param key
+     * @param start
+     * @param end
+     */
+    public void zRemRange(String key, long start, long end) {
+        redisTemplate.opsForZSet().removeRange(key, start, end);
+    }
+
+
+    /**
+     * Remove the members with scores within the given range
+     *
+     * @param key
+     * @param min
+     * @param max
+     */
+    public void zRemRangeByScore(String key, double min, double max) {
+        redisTemplate.opsForZSet().removeRangeByScore(key, min, max);
+    }
+
+    /**
+     * Acquire a non-reentrant lock
+     *
+     * @param key
+     * @param timeout lock expiration in milliseconds
+     * @return
+     */
+    public Boolean lock(String key, long timeout) {
+        return redisTemplate.opsForValue().setIfAbsent(LOCK_PREFIX + key, "1", timeout, TimeUnit.MILLISECONDS);
+    }
+
+    /**
+     * Release a non-reentrant lock
+     *
+     * @param key
+     * @return
+     */
+    public Boolean unlock(String key) {
+        return redisTemplate.delete(LOCK_PREFIX + key);
+    }
+
+    /**
+     * Acquire a re-entrant lock
+     *
+     * @param key         key
+     * @param reentrantId re-entrance id identifying the lock owner
+     * @param timeout     lock expiration in milliseconds
+     * @return
+     */
+    public Boolean lock(String key, String reentrantId, long timeout) {
+        return lock(key, reentrantId, timeout, DEFAULT_RETRIES);
+    }
+
+    /**
+     * Acquire a re-entrant lock
+     *
+     * @param key         key
+     * @param reentrantId re-entrance id identifying the lock owner
+     * @param timeout     lock expiration in milliseconds
+     * @param retries     number of acquire attempts
+     * @return
+     */
+    public Boolean lock(String key, String reentrantId, long timeout, int retries) {
+        return lock(key, reentrantId, timeout, retries, DEFAULT_INTERVAL);
+    }
+
+    /**
+     * Acquire a re-entrant lock
+     *
+     * @param key         key
+     * @param reentrantId re-entrance id identifying the lock owner
+     * @param timeout     lock expiration in milliseconds
+     * @param retries     number of acquire attempts
+     * @param interval    interval between attempts, in milliseconds
+     * @return
+     */
+    public Boolean lock(String key, String reentrantId, long timeout, int retries, long interval) {
+        String lockKey = LOCK_PREFIX + key;
+        DefaultRedisScript<Long> script = new DefaultRedisScript<>();
+        script.setResultType(Long.class);
+        script.setScriptSource(new StaticScriptSource(LOCK));
+        for (int i = 0; i < retries; i++) {
+            Object result = redisTemplate.execute(script, Arrays.asList(lockKey), String.valueOf(timeout), reentrantId);
+            if (Objects.nonNull(result) && Objects.equals(1L, Long.valueOf(result.toString()))) {
+                return true;
+            }
+            try {
+                TimeUnit.MILLISECONDS.sleep(interval);
+            } catch (InterruptedException e) {
+                // restore the interrupt flag instead of swallowing it, then abort the retry loop
+                Thread.currentThread().interrupt();
+                return null;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Release a re-entrant lock
+     *
+     * @param key         key
+     * @param reentrantId re-entrance id used when the lock was acquired
+     */
+    public Boolean unlock(String key, String reentrantId) {
+        String lockKey = LOCK_PREFIX + key;
+        DefaultRedisScript<Long> script = new DefaultRedisScript<>();
+        script.setResultType(Long.class);
+        script.setScriptSource(new StaticScriptSource(UNLOCK));
+        Object result = redisTemplate.execute(script, Arrays.asList(lockKey), reentrantId);
+        if (Objects.isNull(result)) {
+            return null;
+        }
+        if (Objects.equals(1L, Long.valueOf(result.toString()))) {
+            return true;
+        }
+        return false;
+    }
+
+}
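
A sketch of the intended re-entrant lock usage (key name and timings are illustrative). lock() can return null when the waiting thread is interrupted, so the result is compared against Boolean.TRUE; unlock() only deletes the key once the re-entrance counter for the same reentrantId drops to zero.

    import java.util.UUID;

    public class FlowPoolRefreshTask { // hypothetical caller
        private final RedisUtil redisUtil;

        public FlowPoolRefreshTask(RedisUtil redisUtil) {
            this.redisUtil = redisUtil;
        }

        public void refresh() {
            String key = "flow_pool_refresh";                  // illustrative lock key
            String reentrantId = UUID.randomUUID().toString(); // one id per logical owner
            // Hold the lock for at most 30s; uses the default single attempt
            if (Boolean.TRUE.equals(redisUtil.lock(key, reentrantId, 30_000L))) {
                try {
                    // ... critical section ...
                } finally {
                    redisUtil.unlock(key, reentrantId);
                }
            }
        }
    }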

+ 53 - 0
supply-demand-engine-core/src/main/resources/mybatis-generator-config.xml

@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE generatorConfiguration
+        PUBLIC "-//mybatis.org//DTD MyBatis Generator Configuration 1.0//EN"
+        "http://mybatis.org/dtd/mybatis-generator-config_1_0.dtd">
+<!-- Generator configuration -->
+<generatorConfiguration>
+    <context id="mysql" defaultModelType="flat">
+        <property name="autoDelimitKeywords" value="true"/>
+        <!-- Encoding of the generated Java files -->
+        <property name="javaFileEncoding" value="UTF-8"/>
+        <!-- Java code formatter -->
+        <property name="javaFormatter" value="org.mybatis.generator.api.dom.DefaultJavaFormatter"/>
+        <!-- XML formatter -->
+        <property name="xmlFormatter" value="org.mybatis.generator.api.dom.DefaultXmlFormatter"/>
+        <!-- beginningDelimiter and endingDelimiter: the characters used to quote database object names, e.g. double quotes for Oracle and backticks (the MySQL default) -->
+        <property name="beginningDelimiter" value="`"/>
+        <property name="endingDelimiter" value="`"/>
+
+        <plugin type="org.mybatis.generator.plugins.ToStringPlugin"></plugin>
+        <plugin type="org.mybatis.generator.plugins.UnmergeableXmlMappersPlugin" />
+
+        <commentGenerator>
+            <property name="addRemarkComments" value="true"/>
+        </commentGenerator>
+
+        <jdbcConnection driverClass="com.mysql.jdbc.Driver"
+                        connectionURL="jdbc:mysql://rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&amp;characterEncoding=utf-8&amp;zeroDateTimeBehavior=convertToNull&amp;useSSL=false"
+                        userId="wx2016_longvideo" password="wx2016_longvideoP@assword1234">
+        </jdbcConnection>
+
+        <javaTypeResolver type="org.mybatis.generator.internal.types.JavaTypeResolverDefaultImpl">
+            <property name="forceBigDecimals" value="false"/>
+        </javaTypeResolver>
+
+        <javaModelGenerator targetPackage="com.tzld.piaoquan.supply.demand.engine.model.po" targetProject="src/main/java">
+            <property name="constructorBased" value="false"/>
+            <property name="enableSubPackages" value="true"/>
+            <property name="immutable" value="false"/>
+        </javaModelGenerator>
+
+        <sqlMapGenerator targetPackage="mapper" targetProject="src/main/resources">
+            <property name="enableSubPackages" value="true"/>
+        </sqlMapGenerator>
+
+        <javaClientGenerator targetPackage="com.tzld.piaoquan.supply.demand.engine.dao.mapper" type="XMLMAPPER" targetProject="src/main/java">
+            <property name="enableSubPackages" value="true"/>
+        </javaClientGenerator>
+
+        <table tableName="flow_pool_level_video" domainObjectName="FlowPoolLevelVideo" alias=""/>
+
+    </context>
+
+</generatorConfiguration>
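
A sketch of driving this configuration through the standard MyBatis Generator runtime API (the config path assumes the repository root as working directory; adjust as needed):

    import org.mybatis.generator.api.MyBatisGenerator;
    import org.mybatis.generator.config.Configuration;
    import org.mybatis.generator.config.xml.ConfigurationParser;
    import org.mybatis.generator.internal.DefaultShellCallback;

    import java.io.File;
    import java.util.ArrayList;
    import java.util.List;

    public class GenerateMappers { // illustrative runner
        public static void main(String[] args) throws Exception {
            List<String> warnings = new ArrayList<>();
            ConfigurationParser parser = new ConfigurationParser(warnings);
            Configuration config = parser.parseConfiguration(
                    new File("supply-demand-engine-core/src/main/resources/mybatis-generator-config.xml"));
            // overwrite = true regenerates the model/mapper sources in place
            MyBatisGenerator generator = new MyBatisGenerator(config, new DefaultShellCallback(true), warnings);
            generator.generate(null);
            warnings.forEach(System.out::println);
        }
    }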

+ 48 - 0
supply-demand-engine-job/pom.xml

@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>com.tzld.piaoquan</groupId>
+		<artifactId>supply-demand-engine</artifactId>
+		<version>1.0.0</version>
+	</parent>
+	<artifactId>supply-demand-engine-job</artifactId>
+	<name>supply-demand-engine-job</name>
+	<description>supply-demand-engine-job for Spring Boot</description>
+
+	<properties>
+		<java.version>1.8</java.version>
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>com.tzld.piaoquan</groupId>
+			<artifactId>supply-demand-engine-core</artifactId>
+			<version>1.0.0</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<finalName>supply-demand-engine-job</finalName>
+		<plugins>
+			<plugin>
+				<groupId>org.springframework.boot</groupId>
+				<artifactId>spring-boot-maven-plugin</artifactId>
+				<configuration>
+					<mainClass>com.tzld.piaoquan.supply.demand.engine.Application</mainClass>
+					<layout>ZIP</layout>
+				</configuration>
+				<executions>
+					<execution>
+						<goals>
+							<goal>repackage</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+
+	</build>
+
+</project>

+ 31 - 0
supply-demand-engine-job/src/main/java/com/tzld/piaoquan/supply/demand/engine/Application.java

@@ -0,0 +1,31 @@
+package com.tzld.piaoquan.supply.demand.engine;
+
+import lombok.extern.slf4j.Slf4j;
+import org.mybatis.spring.annotation.MapperScan;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.web.servlet.ServletComponentScan;
+import org.springframework.cloud.netflix.eureka.EnableEurekaClient;
+import org.springframework.cloud.openfeign.EnableFeignClients;
+import springfox.documentation.swagger2.annotations.EnableSwagger2;
+
+/**
+ * Application entry point
+ *
+ * @author supeng
+ */
+@Slf4j
+@SpringBootApplication
+@MapperScan("com.tzld.piaoquan.supply.demand.engine")
+@ServletComponentScan("com.tzld.piaoquan.supply.demand.engine")
+@EnableFeignClients
+@EnableEurekaClient
+@EnableSwagger2
+public class Application {
+
+	public static void main(String[] args) {
+		SpringApplication.run(Application.class, args);
+		log.info("supply-demand-engine-job SpringBoot Start Success");
+	}
+
+}

+ 77 - 0
supply-demand-engine-job/src/main/resources/application-dev.yml

@@ -0,0 +1,77 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #prefer the IP address over the hostname when registering, default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #unique instance id registered to eureka
+    lease-renewal-interval-in-seconds: 10 #heartbeat interval from the eureka client to the server, default 30
+    lease-expiration-duration-in-seconds: 30 #how long the eureka server waits for the next heartbeat before evicting the instance, default 90
+  client:
+    registry-fetch-interval-seconds: 5  #interval for fetching the service registry from the Eureka Server
+    serviceUrl:
+      defaultZone: http://deveureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp1ps6my7lzg8rdhwx682.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+
+logging:
+  file:
+    path: /Users/Heyu/datalog/weblog/${project.name}
+
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://127.0.0.1:8080/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+app:
+  id: supply-demand-engine-job
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://devapolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log-test
+      request: request-log-test
+    topic:

+ 77 - 0
supply-demand-engine-job/src/main/resources/application-pre.yml

@@ -0,0 +1,77 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #prefer the IP address over the hostname when registering, default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #unique instance id registered to eureka
+    lease-renewal-interval-in-seconds: 10 #heartbeat interval from the eureka client to the server, default 30
+    lease-expiration-duration-in-seconds: 30 #how long the eureka server waits for the next heartbeat before evicting the instance, default 90
+  client:
+    registry-fetch-interval-seconds: 5  #interval for fetching the service registry from the Eureka Server
+    serviceUrl:
+      defaultZone: http://preeureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1661607875x9596.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp16fg393ej58i51r7.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+
+logging:
+  file:
+    path: /datalog/weblog/${project.name}
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://xxl-job-pre-internal.piaoquantv.com/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+
+app:
+  id: supply-demand-engine-job
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://preapolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log-pre
+      request: request-log-pre
+    topic:

+ 75 - 0
supply-demand-engine-job/src/main/resources/application-prod.yml

@@ -0,0 +1,75 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #prefer the IP address over the hostname when registering, default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #unique instance id registered to eureka
+    lease-renewal-interval-in-seconds: 10 #heartbeat interval from the eureka client to the server, default 30
+    lease-expiration-duration-in-seconds: 30 #how long the eureka server waits for the next heartbeat before evicting the instance, default 90
+  client:
+    registry-fetch-interval-seconds: 5  #interval for fetching the service registry from the Eureka Server
+    serviceUrl:
+      defaultZone: http://eureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1661607875x9596.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp16fg393ej58i51r7.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+logging:
+  file:
+    path: /datalog/weblog/${project.name}
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://192.168.201.25:8182/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+app:
+  id: supply-demand-engine-job
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://apolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log
+      request: request-log
+    topic:

+ 77 - 0
supply-demand-engine-job/src/main/resources/application-stress.yml

@@ -0,0 +1,77 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #prefer the IP address over the hostname when registering, default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #unique instance id registered to eureka
+    lease-renewal-interval-in-seconds: 10 #heartbeat interval from the eureka client to the server, default 30
+    lease-expiration-duration-in-seconds: 30 #how long the eureka server waits for the next heartbeat before evicting the instance, default 90
+  client:
+    registry-fetch-interval-seconds: 5  #interval for fetching the service registry from the Eureka Server
+    serviceUrl:
+      defaultZone: http://testeureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp1ps6my7lzg8rdhwx682.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+
+logging:
+  file:
+    path: /datalog/weblog/${project.name}
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://xxl-job-test-internal.piaoquantv.com/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+
+app:
+  id: supply-demand-engine-job
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://testapolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log-test
+      request: request-log-test
+    topic:

+ 75 - 0
supply-demand-engine-job/src/main/resources/application-test.yml

@@ -0,0 +1,75 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #prefer the IP address over the hostname when registering, default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #unique instance id registered to eureka
+    lease-renewal-interval-in-seconds: 10 #heartbeat interval from the eureka client to the server, default 30
+    lease-expiration-duration-in-seconds: 30 #how long the eureka server waits for the next heartbeat before evicting the instance, default 90
+  client:
+    registry-fetch-interval-seconds: 5  #interval for fetching the service registry from the Eureka Server
+    serviceUrl:
+      defaultZone: http://testeureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp1ps6my7lzg8rdhwx682.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+logging:
+  file:
+    path: /datalog/weblog/${project.name}
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://xxl-job-test-internal.piaoquantv.com/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+app:
+  id: supply-demand-engine-job
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://testapolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log-test
+      request: request-log-test
+    topic:

+ 27 - 0
supply-demand-engine-job/src/main/resources/application.yml

@@ -0,0 +1,27 @@
+spring:
+  application:
+    name: supply-demand-engine-job
+  profiles:
+    active: dev
+
+project:
+  name: supply-demand-engine-job
+
+server:
+  tomcat:
+    threads:
+      max: 1000
+    uri-encoding: UTF-8
+    accept-count: 1000
+  servlet:
+    session:
+      timeout: 60
+pagehelper:
+  helper-dialect: mysql
+
+aliyun:
+  log:
+    endpoint: cn-hangzhou-intranet.log.aliyuncs.com
+    accessKeyId: LTAIP6x1l3DXfSxm
+    accessKeySecret: KbTaM9ars4OX3PMS6Xm7rtxGr1FLon
+    project: supply-demand-engine

+ 193 - 0
supply-demand-engine-job/src/main/resources/logback-spring.xml

@@ -0,0 +1,193 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Log levels from low to high: TRACE < DEBUG < INFO < WARN < ERROR < FATAL; if set to WARN, anything below WARN is not output -->
+<!-- scan: when true, the configuration file is reloaded if it changes; default true -->
+<!-- scanPeriod: interval for checking whether the configuration file has changed; interpreted as milliseconds when no unit is given; only effective when scan is true; default 1 minute -->
+<!-- debug: when true, logback prints its internal status information; default false -->
+<configuration  scan="true" scanPeriod="10 seconds">
+
+    <!--<include resource="org/springframework/boot/logging/logback/base.xml" />-->
+
+    <contextName>logback</contextName>
+    <!-- name is the variable name and value is its value; the variable is added to the logger context and can then be referenced with "${}" -->
+<!--    <property name="LOG_PATH"  value="${logging.file.path}" />-->
+    <springProperty name="LOG_PATH" source="logging.file.path"/>
+
+    <!-- Colored console logs -->
+    <!-- Converter classes required for colored output -->
+    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
+    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
+    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
+    <!-- Colored log pattern -->
+    <property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
+
+    <!-- Console output -->
+    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+        <!-- This appender is for development; only the lowest level is configured, and the console outputs logs at or above this level -->
+        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+            <level>info</level>
+        </filter>
+        <encoder>
+            <Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
+            <!-- 设置字符集 -->
+            <charset>UTF-8</charset>
+        </encoder>
+    </appender>
+
+    <!--输出到文件-->
+    <!-- 时间滚动输出 level为 DEBUG 日志 -->
+    <appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- 正在记录的日志文件的路径及文件名 -->
+        <file>${LOG_PATH}/debug.log</file>
+        <!--日志文件输出格式-->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%X{logTraceId}] %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset> <!-- 设置字符集 -->
+        </encoder>
+        <!-- 日志记录器的滚动策略,按日期,按大小记录 -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <!-- 日志归档 -->
+            <fileNamePattern>${LOG_PATH}/debug/log-debug-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!--日志文件保留天数-->
+            <maxHistory>15</maxHistory>
+        </rollingPolicy>
+        <!-- 此日志文件只记录debug级别的 -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>debug</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <!-- Time- and size-based rolling output for INFO-level logs -->
+    <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- Path and name of the log file currently being written -->
+        <file>${LOG_PATH}/info.log</file>
+        <!-- Log file output pattern -->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%X{logTraceId}] %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset>
+        </encoder>
+        <!-- Rolling policy: roll by date and by size -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <!-- Daily archive path and file name pattern -->
+            <fileNamePattern>${LOG_PATH}/info/log-info-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!-- Number of days to keep log files -->
+            <maxHistory>15</maxHistory>
+        </rollingPolicy>
+        <!-- This file records only info-level messages -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>info</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <!-- Time- and size-based rolling output for WARN-level logs -->
+    <appender name="WARN_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- Path and name of the log file currently being written -->
+        <file>${LOG_PATH}/warn.log</file>
+        <!-- Log file output pattern -->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%X{logTraceId}] %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset> <!-- Character set -->
+        </encoder>
+        <!-- Rolling policy: roll by date and by size -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>${LOG_PATH}/warn/log-warn-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!-- Number of days to keep log files -->
+            <maxHistory>15</maxHistory>
+        </rollingPolicy>
+        <!-- This file records only warn-level messages -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>warn</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+
+    <!-- Time- and size-based rolling output for ERROR-level logs -->
+    <appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- Path and name of the log file currently being written -->
+        <file>${LOG_PATH}/error.log</file>
+        <!-- Log file output pattern -->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%X{logTraceId}] %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset> <!-- Character set -->
+        </encoder>
+        <!-- Rolling policy: roll by date and by size -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>${LOG_PATH}/error/log-error-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!-- Number of days to keep log files -->
+            <maxHistory>15</maxHistory>
+        </rollingPolicy>
+        <!-- This file records only ERROR-level messages -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>ERROR</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <!--
+        <logger> sets the log level for a specific package or class and
+        optionally references <appender>s. A <logger> has exactly one name attribute,
+        an optional level attribute and an optional additivity attribute.
+        name: the package or class this logger applies to.
+        level: the log level, case-insensitive: TRACE, DEBUG, INFO, WARN, ERROR, ALL or OFF,
+              plus the special value INHERITED (or its synonym NULL), which forces the parent's level.
+              If this attribute is not set, the logger inherits its parent's level.
+        additivity: whether log events are also passed up to the parent logger. Default is true.
+    -->
+    <!--<logger name="org.springframework.web" level="info"/>-->
+    <!--<logger name="org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor" level="INFO"/>-->
+    <!--
+        With MyBatis, SQL statements are printed only at debug level, and only info is configured here, so to see the SQL there are two options:
+        1. Change <root level="info"> to <root level="DEBUG">; SQL will be printed, but the logs will also contain many other messages.
+        2. Configure debug level just for the dao package, as in the commented-out sketch below; SQL will then be printed while everything else stays at info:
+     -->
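+    <!--
+        Illustrative sketch only; the dao package name below is an assumption and should be adjusted to the actual mapper/DAO package.
+        Uncommenting a logger like this prints SQL at debug level for that package while the rest of the application stays at info:
+    -->
+    <!--
+    <logger name="com.tzld.piaoquan.supply.demand.engine.dao" level="debug" additivity="false">
+        <appender-ref ref="CONSOLE"/>
+        <appender-ref ref="DEBUG_FILE"/>
+    </logger>
+    -->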
+
+    <!--
+        The root element is mandatory; it sets the base log level and has a single level attribute.
+        level: the log level, case-insensitive: TRACE, DEBUG, INFO, WARN, ERROR, ALL or OFF;
+        it cannot be set to INHERITED or its synonym NULL. The default is DEBUG.
+        It may contain zero or more <appender-ref> elements; each referenced appender is attached to this logger.
+    -->
+
+    <springProfile name="dev">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+    <springProfile name="test">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+    <springProfile name="pre">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+    <springProfile name="stress">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+    <springProfile name="prod">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+
+    <root level="info">
+        <appender-ref ref="CONSOLE" />
+        <appender-ref ref="DEBUG_FILE" />
+        <appender-ref ref="INFO_FILE" />
+        <appender-ref ref="WARN_FILE" />
+        <appender-ref ref="ERROR_FILE" />
+    </root>
+
+</configuration>

+ 13 - 0
supply-demand-engine-job/src/test/java/com/tzld/piaoquan/supply/demand/engine/BaseTest.java

@@ -0,0 +1,13 @@
+package com.tzld.piaoquan.supply.demand.engine;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+
+@SpringBootTest
+public class BaseTest {
+
+    @Test
+    void contextLoads() {
+    }
+
+}

+ 48 - 0
supply-demand-engine-service/pom.xml

@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>com.tzld.piaoquan</groupId>
+        <artifactId>supply-demand-engine</artifactId>
+        <version>1.0.0</version>
+    </parent>
+    <artifactId>supply-demand-engine-service</artifactId>
+    <name>supply-demand-engine-service</name>
+    <description>supply-demand-engine-service for Spring Boot</description>
+
+    <properties>
+        <java.version>1.8</java.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.tzld.piaoquan</groupId>
+            <artifactId>supply-demand-engine-core</artifactId>
+            <version>1.0.0</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <finalName>supply-demand-engine-service</finalName>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+                <configuration>
+                    <mainClass>com.tzld.piaoquan.supply.demand.engine.Application</mainClass>
+                    <layout>ZIP</layout>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>repackage</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+
+    </build>
+
+</project>

+ 31 - 0
supply-demand-engine-service/src/main/java/com/tzld/piaoquan/supply/demand/engine/Application.java

@@ -0,0 +1,31 @@
+package com.tzld.piaoquan.supply.demand.engine;
+
+import org.mybatis.spring.annotation.MapperScan;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.web.servlet.ServletComponentScan;
+import org.springframework.cloud.netflix.eureka.EnableEurekaClient;
+import org.springframework.cloud.openfeign.EnableFeignClients;
+import springfox.documentation.swagger2.annotations.EnableSwagger2;
+
+/**
+ * Application entry point
+ *
+ * @author supeng
+ */
+@SpringBootApplication
+@MapperScan("com.tzld.piaoquan.supply.demand.engine")
+@ServletComponentScan("com.tzld.piaoquan.supply.demand.engine")
+@EnableFeignClients
+@EnableEurekaClient
+@EnableSwagger2
+public class Application{
+    private static final Logger LOGGER = LoggerFactory.getLogger(Application.class);
+
+    public static void main(String[] args) {
+        SpringApplication.run(Application.class, args);
+        LOGGER.info("supply-demand-engine-service SpringBoot Start Success");
+    }
+}

+ 24 - 0
supply-demand-engine-service/src/main/java/com/tzld/piaoquan/supply/demand/engine/controller/IndexController.java

@@ -0,0 +1,24 @@
+package com.tzld.piaoquan.supply.demand.engine.controller;
+
+import com.tzld.piaoquan.supply.demand.engine.common.annotation.NoRequestLog;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+@Slf4j
+@RestController
+@RequestMapping("/")
+public class IndexController {
+    /**
+     * Health check endpoint (liveness probe)
+     *
+     * @return "ok" when the service is up
+     */
+    @NoRequestLog
+    @GetMapping("/healthcheck")
+    public String healthcheck() {
+        log.info("I'm ok");
+        return "ok";
+    }
+}

+ 77 - 0
supply-demand-engine-service/src/main/resources/application-dev.yml

@@ -0,0 +1,77 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #Prefer the IP address over the hostname as the instance identifier; default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #Unique instance ID registered with Eureka
+    lease-renewal-interval-in-seconds: 10 #How often the Eureka client sends heartbeats to the server; default 30
+    lease-expiration-duration-in-seconds: 30 #How long the Eureka server waits for the next heartbeat after the last one before evicting the instance; default 90
+  client:
+    registry-fetch-interval-seconds: 5  #Interval for fetching service registry information from the Eureka server
+    serviceUrl:
+      defaultZone: http://deveureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp1ps6my7lzg8rdhwx682.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+
+logging:
+  file:
+    path: /Users/Heyu/datalog/weblog/${project.name}
+
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://127.0.0.1:8080/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+app:
+  id: supply-demand-engine-service
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://devapolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log-test
+      request: request-log-test
+    topic:

+ 77 - 0
supply-demand-engine-service/src/main/resources/application-pre.yml

@@ -0,0 +1,77 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #Prefer the IP address over the hostname as the instance identifier; default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #Unique instance ID registered with Eureka
+    lease-renewal-interval-in-seconds: 10 #How often the Eureka client sends heartbeats to the server; default 30
+    lease-expiration-duration-in-seconds: 30 #How long the Eureka server waits for the next heartbeat after the last one before evicting the instance; default 90
+  client:
+    registry-fetch-interval-seconds: 5  #Interval for fetching service registry information from the Eureka server
+    serviceUrl:
+      defaultZone: http://preeureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1661607875x9596.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp16fg393ej58i51r7.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+
+logging:
+  file:
+    path: /datalog/weblog/${project.name}
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://xxl-job-pre-internal.piaoquantv.com/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+
+app:
+  id: supply-demand-engine-service
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://preapolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log-pre
+      request: request-log-pre
+    topic:

+ 75 - 0
supply-demand-engine-service/src/main/resources/application-prod.yml

@@ -0,0 +1,75 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #Prefer the IP address over the hostname as the instance identifier; default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #Unique instance ID registered with Eureka
+    lease-renewal-interval-in-seconds: 10 #How often the Eureka client sends heartbeats to the server; default 30
+    lease-expiration-duration-in-seconds: 30 #How long the Eureka server waits for the next heartbeat after the last one before evicting the instance; default 90
+  client:
+    registry-fetch-interval-seconds: 5  #Interval for fetching service registry information from the Eureka server
+    serviceUrl:
+      defaultZone: http://eureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1661607875x9596.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp16fg393ej58i51r7.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+logging:
+  file:
+    path: /datalog/weblog/${project.name}
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://192.168.201.25:8182/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+app:
+  id: supply-demand-engine-service
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://apolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log
+      request: request-log
+    topic:

+ 77 - 0
supply-demand-engine-service/src/main/resources/application-stress.yml

@@ -0,0 +1,77 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #Prefer the IP address over the hostname as the instance identifier; default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #Unique instance ID registered with Eureka
+    lease-renewal-interval-in-seconds: 10 #How often the Eureka client sends heartbeats to the server; default 30
+    lease-expiration-duration-in-seconds: 30 #How long the Eureka server waits for the next heartbeat after the last one before evicting the instance; default 90
+  client:
+    registry-fetch-interval-seconds: 5  #Interval for fetching service registry information from the Eureka server
+    serviceUrl:
+      defaultZone: http://testeureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp1ps6my7lzg8rdhwx682.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+
+logging:
+  file:
+    path: /datalog/weblog/${project.name}
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://xxl-job-test-internal.piaoquantv.com/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+
+app:
+  id: supply-demand-engine-service
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://testapolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log-test
+      request: request-log-test
+    topic:

+ 75 - 0
supply-demand-engine-service/src/main/resources/application-test.yml

@@ -0,0 +1,75 @@
+server:
+  port: 8080
+
+eureka:
+  instance:
+    prefer-ip-address: true #Prefer the IP address over the hostname as the instance identifier; default false
+    instance-id: ${spring.application.name}:${HOSTNAME}:${server.port} #Unique instance ID registered with Eureka
+    lease-renewal-interval-in-seconds: 10 #How often the Eureka client sends heartbeats to the server; default 30
+    lease-expiration-duration-in-seconds: 30 #How long the Eureka server waits for the next heartbeat after the last one before evicting the instance; default 90
+  client:
+    registry-fetch-interval-seconds: 5  #Interval for fetching service registry information from the Eureka server
+    serviceUrl:
+      defaultZone: http://testeureka-internal.piaoquantv.com/eureka/
+
+spring:
+  datasource:
+    driver-class-name: com.mysql.jdbc.Driver
+    url: jdbc:mysql://rm-bp1k5853td1r25g3n690.mysql.rds.aliyuncs.com:3306/flowpool?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull
+    username: wx2016_longvideo
+    password: wx2016_longvideoP@assword1234
+    type: com.zaxxer.hikari.HikariDataSource
+    hikari:
+      minimum-idle: 10
+      maximum-pool-size: 20
+      connection-test-query: SELECT 1
+
+  redis:
+    hostName: r-bp1ps6my7lzg8rdhwx682.redis.rds.aliyuncs.com
+    port: 6379
+    password: Wqsd@2019
+    timeout: 1000
+    lettuce:
+      pool:
+        max-active: 8
+        max-wait: -1
+        max-idle: 8
+        min-idle: 0
+
+mybatis:
+  type-aliases-package: com.tzld.piaoquan.flowpool.model.po
+  mapper-locations: classpath:mapper/*.xml
+
+logging:
+  file:
+    path: /datalog/weblog/${project.name}
+
+#xxl-job config https://www.xuxueli.com/xxl-job/#%E6%AD%A5%E9%AA%A4%E4%BA%8C%EF%BC%9A%E6%89%A7%E8%A1%8C%E5%99%A8%E9%85%8D%E7%BD%AE
+xxl:
+  job:
+    admin:
+      addresses: http://xxl-job-test-internal.piaoquantv.com/xxl-job-admin
+    accessToken:
+    executor:
+      appname: ${project.name}
+      address:
+      ip:
+      port: 9999
+      logpath: /datalog/weblog/${project.name}/xxl-job/
+      logretentiondays: 30
+
+app:
+  id: supply-demand-engine-service
+apollo:
+  bootstrap:
+    enabled: true
+    namespaces: application
+  meta: http://testapolloconfig-internal.piaoquantv.com
+  cacheDir: /datalog/apollo-cache-dir
+
+aliyun:
+  log:
+    logstore:
+      action: action-log-test
+      request: request-log-test
+    topic:

+ 27 - 0
supply-demand-engine-service/src/main/resources/application.yml

@@ -0,0 +1,27 @@
+spring:
+  application:
+    name: supply-demand-engine-service
+  profiles:
+    active: dev
+
+project:
+  name: supply-demand-engine-service
+
+server:
+  tomcat:
+    threads:
+      max: 1000
+    uri-encoding: UTF-8
+    accept-count: 1000
+  servlet:
+    session:
+      timeout: 60
+pagehelper:
+  helper-dialect: mysql
+
+aliyun:
+  log:
+    endpoint: cn-hangzhou-intranet.log.aliyuncs.com
+    accessKeyId: LTAIP6x1l3DXfSxm
+    accessKeySecret: KbTaM9ars4OX3PMS6Xm7rtxGr1FLon
+    project: supply-demand-engine

+ 193 - 0
supply-demand-engine-service/src/main/resources/logback-spring.xml

@@ -0,0 +1,193 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Log levels from low to high: TRACE < DEBUG < INFO < WARN < ERROR < FATAL; if the level is set to WARN, messages below WARN are not output -->
+<!-- scan: when set to true, the configuration file is reloaded if it changes; default is true -->
+<!-- scanPeriod: interval for checking whether the configuration file has changed; if no time unit is given, the default unit is milliseconds. Effective only when scan is true. Default interval is 1 minute. -->
+<!-- debug: when set to true, logback's internal status messages are printed so its runtime state can be observed. Default is false. -->
+<configuration scan="true" scanPeriod="10 seconds">
+
+    <!--<include resource="org/springframework/boot/logging/logback/base.xml" />-->
+
+    <contextName>logback</contextName>
+    <!-- name is the variable's name and value is the value it is defined with; the value is inserted into the logger context and can then be referenced with "${}". -->
+<!--    <property name="LOG_PATH"  value="${logging.file.path}" />-->
+    <springProperty name="LOG_PATH" source="logging.file.path"/>
+
+    <!-- Colored logs -->
+    <!-- Converter classes that colored log output depends on -->
+    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
+    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
+    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
+    <!-- Colored console log pattern -->
+    <property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
+
+    <!-- Console output -->
+    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+        <!-- This appender is intended for development; only the lowest level is configured, and the console outputs messages at or above that level -->
+        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+            <level>info</level>
+        </filter>
+        <encoder>
+            <Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
+            <!-- Character set -->
+            <charset>UTF-8</charset>
+        </encoder>
+    </appender>
+
+    <!-- File output -->
+    <!-- Time- and size-based rolling output for DEBUG-level logs -->
+    <appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- Path and name of the log file currently being written -->
+        <file>${LOG_PATH}/debug.log</file>
+        <!-- Log file output pattern -->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%X{logTraceId}] %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset> <!-- Character set -->
+        </encoder>
+        <!-- Rolling policy: roll by date and by size -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <!-- Log archive file name pattern -->
+            <fileNamePattern>${LOG_PATH}/debug/log-debug-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!-- Number of days to keep log files -->
+            <maxHistory>15</maxHistory>
+        </rollingPolicy>
+        <!-- This file records only debug-level messages -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>debug</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <!-- Time- and size-based rolling output for INFO-level logs -->
+    <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- Path and name of the log file currently being written -->
+        <file>${LOG_PATH}/info.log</file>
+        <!-- Log file output pattern -->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%X{logTraceId}] %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset>
+        </encoder>
+        <!-- Rolling policy: roll by date and by size -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <!-- Daily archive path and file name pattern -->
+            <fileNamePattern>${LOG_PATH}/info/log-info-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!-- Number of days to keep log files -->
+            <maxHistory>15</maxHistory>
+        </rollingPolicy>
+        <!-- This file records only info-level messages -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>info</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <!-- Time- and size-based rolling output for WARN-level logs -->
+    <appender name="WARN_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- Path and name of the log file currently being written -->
+        <file>${LOG_PATH}/warn.log</file>
+        <!-- Log file output pattern -->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%X{logTraceId}] %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset> <!-- Character set -->
+        </encoder>
+        <!-- Rolling policy: roll by date and by size -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>${LOG_PATH}/warn/log-warn-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!-- Number of days to keep log files -->
+            <maxHistory>15</maxHistory>
+        </rollingPolicy>
+        <!-- This file records only warn-level messages -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>warn</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+
+    <!-- Time- and size-based rolling output for ERROR-level logs -->
+    <appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- Path and name of the log file currently being written -->
+        <file>${LOG_PATH}/error.log</file>
+        <!-- Log file output pattern -->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level [%X{logTraceId}] %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset> <!-- Character set -->
+        </encoder>
+        <!-- Rolling policy: roll by date and by size -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>${LOG_PATH}/error/log-error-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!-- Number of days to keep log files -->
+            <maxHistory>15</maxHistory>
+        </rollingPolicy>
+        <!-- This file records only ERROR-level messages -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>ERROR</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <!--
+        <logger> sets the log level for a specific package or class and
+        optionally references <appender>s. A <logger> has exactly one name attribute,
+        an optional level attribute and an optional additivity attribute.
+        name: the package or class this logger applies to.
+        level: the log level, case-insensitive: TRACE, DEBUG, INFO, WARN, ERROR, ALL or OFF,
+              plus the special value INHERITED (or its synonym NULL), which forces the parent's level.
+              If this attribute is not set, the logger inherits its parent's level.
+        additivity: whether log events are also passed up to the parent logger. Default is true.
+    -->
+    <!--<logger name="org.springframework.web" level="info"/>-->
+    <!--<logger name="org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor" level="INFO"/>-->
+    <!--
+        With MyBatis, SQL statements are printed only at debug level, and only info is configured here, so to see the SQL there are two options:
+        1. Change <root level="info"> to <root level="DEBUG">; SQL will be printed, but the logs will also contain many other messages.
+        2. Configure debug level just for the dao package, as in the commented-out sketch below; SQL will then be printed while everything else stays at info:
+     -->
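+    <!--
+        Illustrative sketch only; the dao package name below is an assumption and should be adjusted to the actual mapper/DAO package.
+        Uncommenting a logger like this prints SQL at debug level for that package while the rest of the application stays at info:
+    -->
+    <!--
+    <logger name="com.tzld.piaoquan.supply.demand.engine.dao" level="debug" additivity="false">
+        <appender-ref ref="CONSOLE"/>
+        <appender-ref ref="DEBUG_FILE"/>
+    </logger>
+    -->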
+
+    <!--
+        The root element is mandatory; it sets the base log level and has a single level attribute.
+        level: the log level, case-insensitive: TRACE, DEBUG, INFO, WARN, ERROR, ALL or OFF;
+        it cannot be set to INHERITED or its synonym NULL. The default is DEBUG.
+        It may contain zero or more <appender-ref> elements; each referenced appender is attached to this logger.
+    -->
+
+    <springProfile name="dev">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+    <springProfile name="test">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+    <springProfile name="pre">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+    <springProfile name="stress">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+    <springProfile name="prod">
+        <logger name="com.tzld.piaoquan.supply.demand.engine" level="info"/>
+    </springProfile>
+
+    <root level="info">
+        <appender-ref ref="CONSOLE" />
+        <appender-ref ref="DEBUG_FILE" />
+        <appender-ref ref="INFO_FILE" />
+        <appender-ref ref="WARN_FILE" />
+        <appender-ref ref="ERROR_FILE" />
+    </root>
+
+</configuration>

+ 13 - 0
supply-demand-engine-service/src/test/java/com/tzld/piaoquan/supply/demand/engine/BaseTest.java

@@ -0,0 +1,13 @@
+package com.tzld.piaoquan.supply.demand.engine;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+
+@SpringBootTest
+public class BaseTest {
+
+    @Test
+    void contextLoads() {
+    }
+
+}