jiazx0107@163.com
2023-10-31 9934bdd6043c55155fea761d0dd687c443849b4b
四川省网关接口相关13
已重命名1个文件
已删除3个文件
已修改10个文件
已添加6个文件
734 ■■■■■ 文件已修改
pom.xml 2 ●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/conf/KafkaConfig.java 82 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/conf/MyPartitionTemplate.java 65 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/api/DeviceReportService.java 4 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/api/GatewaySyncGranService.java 4 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/hx2023/data/GrainDataDetail.java 32 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/hx2023/data/KafaGrainData.java 20 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/hx2023/data/KafkaGrainDataDetail.java 58 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/hx2023/data/KafkaGrainTH.java 24 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/hx2023/data/TRHInfo.java 15 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/hx2023/kafka/KafkaDeviceReport.java 4 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/hx2023/service/DeviceReportServiceImpl.java 4 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/hx2023/service/HxGatewaySyncGrainImpl.java 4 ●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/java/com/fzzy/gateway/service/GatewayDeviceService.java 127 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/resources/application-dev.yml 22 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/resources/application-devGateway.yml 75 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/resources/application-gateway.yml 57 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/resources/application-proGateway.yml 81 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/resources/application.yml 2 ●●● 补丁 | 查看 | 原始文档 | blame | 历史
src/main/resources/logback-spring.xml 52 ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史
pom.xml
@@ -149,7 +149,7 @@
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
            <version>2.2.1.RELEASE</version>
            <version>2.2.0.RELEASE</version>
        </dependency>
        <dependency>
src/main/java/com/fzzy/conf/KafkaConfig.java
对比新文件
@@ -0,0 +1,82 @@
package com.fzzy.conf;
import lombok.AllArgsConstructor;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import java.util.HashMap;
import java.util.Map;
@Configuration
@EnableKafka
public class KafkaConfig {
    private String defaultSaslJaasConfig="org.apache.kafka.common.security.scram.ScramLoginModule required username=\"{username}\" password=\"{password}\";";
    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;
    @Value("${spring.kafka.properties.security.protocol}")
    private String securityProtocol;
    @Value("${spring.kafka.properties.sasl.mechanism}")
    private String saslMechanism;
    @Value("${spring.kafka.properties.sasl.jaas.config}")
    private String saslJaasConfig;
    @Value("${spring.kafka.properties.sasl.username}")
    private String saslUsername;
    @Value("${spring.kafka.properties.sasl.password}")
    private String saslPassword;
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        DefaultKafkaProducerFactory<String, String> producerFactory = new DefaultKafkaProducerFactory<>(producerConfigs());
        return producerFactory;
    }
    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 5000);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 10);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 10000);
        props.put("security.protocol", securityProtocol);
        props.put("sasl.mechanism", saslMechanism);
        props.put("sasl.username", saslUsername);
        props.put("sasl.password", saslPassword);
      //  props.put("sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required username='sc001' password='wCV0ISwmoKwbx1lpBKMW';");
        props.put("sasl.jaas.config",saslJaasConfig);
        return props;
    }
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate(ProducerFactory<String, String> producerFactory) {
        return new KafkaTemplate<>(producerFactory);
    }
}
src/main/java/com/fzzy/conf/MyPartitionTemplate.java
文件已删除
src/main/java/com/fzzy/gateway/api/DeviceReportService.java
@@ -1,7 +1,7 @@
package com.fzzy.gateway.api;
import com.fzzy.gateway.entity.GatewayDevice;
import com.fzzy.gateway.hx2023.data.GrainData;
import com.fzzy.gateway.hx2023.data.KafaGrainData;
public interface DeviceReportService {
@@ -28,6 +28,6 @@
     * @param data
     * @return
     */
    String pushGrainData2Cloud(GrainData data);
    String pushGrainData2Cloud(KafaGrainData data);
}
src/main/java/com/fzzy/gateway/api/GatewaySyncGranService.java
@@ -1,7 +1,7 @@
package com.fzzy.gateway.api;
import com.fzzy.gateway.hx2023.data.GrainData;
import com.fzzy.gateway.hx2023.data.KafaGrainData;
import com.fzzy.gateway.hx2023.data.*;
/**
@@ -22,7 +22,7 @@
     * @param reqData
     * @return
     */
    public GrainData syncGrain(SyncReqData reqData);
    public KafaGrainData syncGrain(SyncReqData reqData);
}
src/main/java/com/fzzy/gateway/hx2023/data/GrainDataDetail.java
文件已删除
src/main/java/com/fzzy/gateway/hx2023/data/KafaGrainData.java
文件名从 src/main/java/com/fzzy/gateway/hx2023/data/GrainData.java 修改
@@ -1,14 +1,14 @@
package com.fzzy.gateway.hx2023.data;
import com.alibaba.fastjson2.JSONObject;
import lombok.Data;
import java.util.List;
/**
 * 粮情信息
 */
@Data
public class GrainData {
public class KafaGrainData {
    //消息 ID
    private String messageId;
@@ -28,22 +28,22 @@
    //检测时间-格式:yyyy-MM-dd HH:mm:ss
    private String collectTime;
    private String minX = "0";
    private int minX = 0;
    private String minY = "0";
    private int minY = 0;
    private String minZ = "0";
    private int minZ = 0;
    private String maxX = "0";
    private int maxX = 0;
    private String maxY = "0";
    private int maxY = 0;
    private String maxZ = "0";
    private int maxZ = 0;
    //模块识别码-常量,固定传 apiTemperature
    private String apISource = "apiTemperature";
    //粮温详细
    private List<GrainDataDetail> temperature;
    private JSONObject params;
}
src/main/java/com/fzzy/gateway/hx2023/data/KafkaGrainDataDetail.java
对比新文件
@@ -0,0 +1,58 @@
package com.fzzy.gateway.hx2023.data;
import lombok.Data;
/**
 * ç²®æƒ…信息详细
 */
@Data
public class KafkaGrainDataDetail {
    //根号
    private String cableNum;
    //层号
    private String layerNumber;
    //温度值
    private String temperature;
    //索引,从0开始
    private String position;
    //该温度点所在的列,平方仓、地下仓必填
    private int linex;
    //该温度点所在的行,平方仓、地下仓必填
    private int rowy;
    //浅圆仓、筒仓必填,示例:{\"totalCircle\":3,\"smallCircle\":\"4,10,16\"},totalCircle:总圈数,smallCircle:每圈有几根缆
    private int total_circle;
    //具体圈数--浅圆仓、筒仓必填
    private int circle;
    public KafkaGrainDataDetail() {
    }
    public KafkaGrainDataDetail(String cableNum, String layerNumber, String temperature, String position, int linex, int rowy, int total_circle, int circle) {
        this.cableNum = cableNum;
        this.layerNumber = layerNumber;
        this.temperature = temperature;
        this.position = position;
        this.linex = linex;
        this.rowy = rowy;
        this.total_circle = total_circle;
        this.circle = circle;
    }
    public KafkaGrainDataDetail(String cableNum, String layerNumber, String temperature, String position, int linex, int rowy) {
        this.cableNum = cableNum;
        this.layerNumber = layerNumber;
        this.temperature = temperature;
        this.position = position;
        this.linex = linex;
        this.rowy = rowy;
    }
}
src/main/java/com/fzzy/gateway/hx2023/data/KafkaGrainTH.java
对比新文件
@@ -0,0 +1,24 @@
package com.fzzy.gateway.hx2023.data;
import lombok.Data;
import java.util.List;
/**
 * Warehouse and ambient temperature/humidity readings of one
 * grain-condition report (element of {@code TRHInfo#temperatureAndhumidity}).
 * All values are serialized as strings for the gateway JSON payload.
 */
@Data
public class KafkaGrainTH {
    //in-warehouse humidity (仓湿)
    private String humidity;
    //in-warehouse temperature (仓温)
    private String temperature;
    //outside air temperature (气温)
    private String airTemperature;
    //outside air humidity (气湿)
    private String airHumidity;
}
src/main/java/com/fzzy/gateway/hx2023/data/TRHInfo.java
对比新文件
@@ -0,0 +1,15 @@
package com.fzzy.gateway.hx2023.data;
import lombok.Data;
import java.util.List;
/**
 * Container for the "TRHInfo" params object of a grain-condition push:
 * per-point temperatures plus warehouse/ambient temperature-humidity rows.
 */
@Data
public class TRHInfo {
    //per-point grain temperature details
    private List<KafkaGrainDataDetail> temperature;
    //warehouse/ambient temperature-humidity readings (callers add a single entry)
    private List<KafkaGrainTH> temperatureAndhumidity;
}
src/main/java/com/fzzy/gateway/hx2023/kafka/KafkaDeviceReport.java
@@ -1,7 +1,7 @@
package com.fzzy.gateway.hx2023.kafka;
import com.alibaba.fastjson2.JSONObject;
import com.fzzy.gateway.hx2023.data.GrainData;
import com.fzzy.gateway.hx2023.data.KafaGrainData;
import lombok.extern.slf4j.Slf4j;
@@ -30,7 +30,7 @@
     * @param data
     * @return
     */
    public String sendGrainData2Cloud(GrainData data) {
    public String sendGrainData2Cloud(KafaGrainData data) {
        String strData = JSONObject.toJSONString(data);
src/main/java/com/fzzy/gateway/hx2023/service/DeviceReportServiceImpl.java
@@ -6,7 +6,7 @@
import com.fzzy.gateway.api.DeviceReportService;
import com.fzzy.gateway.entity.GatewayDevice;
import com.fzzy.gateway.hx2023.ScConstant;
import com.fzzy.gateway.hx2023.data.GrainData;
import com.fzzy.gateway.hx2023.data.KafaGrainData;
import com.fzzy.gateway.hx2023.data.WebSocketPacket;
import com.fzzy.gateway.hx2023.data.WebSocketPacketHeader;
import com.fzzy.gateway.hx2023.data.WeightInfo;
@@ -77,7 +77,7 @@
    }
    @Override
    public String pushGrainData2Cloud(GrainData data) {
    public String pushGrainData2Cloud(KafaGrainData data) {
        return kafkaDeviceReport.sendGrainData2Cloud(data);
    }
src/main/java/com/fzzy/gateway/hx2023/service/HxGatewaySyncGrainImpl.java
@@ -1,7 +1,7 @@
package com.fzzy.gateway.hx2023.service;
import com.fzzy.gateway.api.GatewaySyncGranService;
import com.fzzy.gateway.hx2023.data.GrainData;
import com.fzzy.gateway.hx2023.data.KafaGrainData;
import com.fzzy.gateway.hx2023.data.SyncReqData;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
@@ -22,7 +22,7 @@
    }
    @Override
    public GrainData syncGrain(SyncReqData reqData) {
    public KafaGrainData syncGrain(SyncReqData reqData) {
        return null;
    }
}
src/main/java/com/fzzy/gateway/service/GatewayDeviceService.java
@@ -1,5 +1,6 @@
package com.fzzy.gateway.service;
import com.alibaba.fastjson2.JSONObject;
import com.bstek.dorado.annotation.DataProvider;
import com.bstek.dorado.annotation.DataResolver;
import com.bstek.dorado.annotation.Expose;
@@ -10,11 +11,12 @@
import com.fzzy.gateway.GatewayUtils;
import com.fzzy.gateway.api.DeviceReportService;
import com.fzzy.gateway.api.GatewayRemoteManager;
import com.fzzy.gateway.data.QueryParam;
import com.fzzy.gateway.entity.GatewayDevice;
import com.fzzy.gateway.hx2023.ScConstant;
import com.fzzy.gateway.hx2023.data.GrainData;
import com.fzzy.gateway.hx2023.data.GrainDataDetail;
import com.fzzy.gateway.hx2023.data.KafaGrainData;
import com.fzzy.gateway.hx2023.data.KafkaGrainDataDetail;
import com.fzzy.gateway.hx2023.data.KafkaGrainTH;
import com.fzzy.gateway.hx2023.data.TRHInfo;
import com.fzzy.gateway.service.repository.GatewayDeviceRep;
import lombok.extern.slf4j.Slf4j;
@@ -121,7 +123,7 @@
     * @return
     */
    @Expose
    public String ajaxTestGrain(Map<String,Object> parameter) {
    public String ajaxTestGrain(Map<String, Object> parameter) {
        //获取设备配置,只针对粮情设备进行执行
@@ -139,32 +141,32 @@
        //如果部署FZZY-IGDS-V40版本系统
        return this.pushByV40(list, start,end);
        return this.pushByV40(list, start, end);
    }
    private String pushByV40(List<GatewayDevice> list, Date start,Date end) {
    private String pushByV40(List<GatewayDevice> list, Date start, Date end) {
        String depotIdSys;
        List<Fz40Grain> listGrain;
        Fz40Grain lastData;
        GrainData pushData;
        KafaGrainData pushData;
        DeviceReportService deviceReportService = null;
        for (GatewayDevice device : list) {
            depotIdSys = device.getDepotIdSys();
            if (StringUtils.isEmpty(depotIdSys)) {
                log.info("--------设备--{}-未配置系统相关仓库编码,无法执行当前操作",device.getDeviceName());
                log.info("--------设备--{}-未配置系统相关仓库编码,无法执行当前操作", device.getDeviceName());
                continue;
            }
            listGrain = fzzy40CommonService.listGrain(depotIdSys, start, end);
            if (null == listGrain || listGrain.isEmpty()) {
                log.info("---------设备---{}--未同步到粮情信息,请确认当前条件下是否有数据",device.getDeviceName());
                log.info("---------设备---{}--未同步到粮情信息,请确认当前条件下是否有数据", device.getDeviceName());
                continue;
            }
@@ -193,8 +195,8 @@
     * @param lastData
     * @return
     */
    private GrainData lastData2PushData(Fz40Grain lastData, GatewayDevice device) {
        GrainData result = new GrainData();
    private KafaGrainData lastData2PushData(Fz40Grain lastData, GatewayDevice device) {
        KafaGrainData result = new KafaGrainData();
        result.setMessageId(ScConstant.getMessageId());
        result.setDeviceID(device.getDeviceId());
@@ -206,63 +208,100 @@
        //层-行-列
        String[] attrCable = lastData.getCable().split("-");
        //层配置
        int layMax = Integer.valueOf(attrCable[0]);
        //针对筒仓配置
        String[] cableCirAtt = new String[0];
        if (StringUtils.isNotEmpty(lastData.getCableCir())) {
            cableCirAtt = lastData.getCableCir().split("-");
            return lastData2PushData2(lastData, device);
        }
        //层行列
        int cableZ = Integer.valueOf(attrCable[0]);
        int cableY = Integer.valueOf(attrCable[1]);
        int cableX = Integer.valueOf(attrCable[2]);
        //温度集合
        String[] attr = lastData.getPoints().split(",");
        //根号
        int cableNum = 1, layerNumber = 1, position = 0;
        int cableNum = 1, position = 0;
        int curCir = 1;//所在圈
        int cirLay = 1;//当前圈的层
        String curTemp;
        List<GrainDataDetail> details = new ArrayList<>();
        List<KafkaGrainDataDetail> temperature = new ArrayList<>();
        int x = 0, y = 0, z = 0;
        for (int i = 0; i < attr.length; i++) {
            position = i;
            curTemp = attr[i];
            z = i % cableZ + 1;
            x = i / (cableZ * cableY);
            y = x * (cableZ * cableY);
            y = (i - y) / cableZ;
            // 倒转X轴
            x = cableX - 1 - x;
            //根号
            cableNum = (i / layMax) + 1;
            layerNumber = (i % layMax) + 1;
            cableNum = (i / cableZ) + 1;
            if (cableCirAtt.length > 0) {
                curCir = getCurCir(cableNum, attrCable);
                cirLay = Integer.valueOf(cableCirAtt[curCir - 1]);
                details.add(new GrainDataDetail(cableNum, cirLay, position, curTemp));
            } else {
                //判断最大 TODO å¾…优化
                if (curTemp.equals(result.getMaxTemperature())) {
                    result.setMaxX(cableNum + "");
                    result.setMaxZ(curTemp);
                }
            curTemp = attr[i];
                //判断最小 TODO å¾…优化
                if (curTemp.equals(result.getMinTemperature())) {
                    result.setMinX(cableNum + "");
                    result.setMinZ(curTemp);
                }
                details.add(new GrainDataDetail(cableNum, layerNumber, position, curTemp));
            //判断最大
            if (curTemp.equals(result.getMaxTemperature())) {
                result.setMaxX(x);
                result.setMaxY(y);
                result.setMaxZ(position);
            }
            //判断最小
            if (curTemp.equals(result.getMinTemperature())) {
                result.setMinX(x);
                result.setMinY(y);
                result.setMinZ(position);
            }
            temperature.add(new KafkaGrainDataDetail(cableNum + "", z + "", curTemp, position + "", x, y));
        }
        result.setTemperature(details);
        //粮温信息
        TRHInfo trhInfo = new TRHInfo();
        trhInfo.setTemperature(temperature);
        //仓温度信息
        KafkaGrainTH grainTH = new KafkaGrainTH();
        grainTH.setHumidity(lastData.getHumidityIn() + "");
        grainTH.setTemperature(lastData.getTempIn() + "");
        grainTH.setAirHumidity(lastData.getHumidityOut() + "");
        grainTH.setAirTemperature(lastData.getTempOut() + "");
        List<KafkaGrainTH> temperatureAndhumidity = new ArrayList<>();
        temperatureAndhumidity.add(grainTH);
        trhInfo.setTemperatureAndhumidity(temperatureAndhumidity);
        JSONObject params = new JSONObject();
        params.put("TRHInfo", trhInfo);
        result.setParams(params);
        return result;
    }
    /**
     * 针对筒仓  TODO -----
     *
     * @param lastData
     * @param device
     * @return
     */
    private KafaGrainData lastData2PushData2(Fz40Grain lastData, GatewayDevice device) {
        return null;
    }
    private int getCurCir(int curRoot, String[] cableRuleAtt) {
        int sum = 0;
src/main/resources/application-dev.yml
@@ -82,21 +82,17 @@
    sasl-jaas-config: org.apache.kafka.common.security.scram.ScramLoginModule required username=\"{username}\" password=\"{password}\";"
    sasl-username: sc001
    sasl-password: wCV0ISwmoKwbx1lpBKMW
    producer: # producer ç”Ÿäº§è€…
      retries: 0 # é‡è¯•次数
      acks: 1 # åº”答级别:多少个分区副本备份完成时向生产者发送ack确认(可选0、1、all/-1)
      batch-size: 16384 # æ‰¹é‡å¤§å°
      buffer-memory: 33554432 # ç”Ÿäº§ç«¯ç¼“冲区大小
    producer:
      retries: 0
      acks: 1
      batch-size: 16384
      buffer-memory: 33554432
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer: # consumer消费者
      group-id: fzzygroup # é»˜è®¤çš„æ¶ˆè´¹ç»„ID
      enable-auto-commit: true # æ˜¯å¦è‡ªåŠ¨æäº¤offset
      auto-commit-interval: 100  # æäº¤offset延时(接收到消息后多久提交offset)
      # earliest:当各分区下有已提交的offset时,从提交的offset开始消费;无提交的offset时,从头开始消费
      # latest:当各分区下有已提交的offset时,从提交的offset开始消费;无提交的offset时,消费新产生的该分区下的数据
      # none:topic各分区都存在已提交的offset时,从offset后开始消费;只要有一个分区不存在已提交的offset,则抛出异常
    consumer:
      group-id: fzzygroup
      enable-auto-commit: true
      auto-commit-interval: 100
      auto-offset-reset: latest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
src/main/resources/application-devGateway.yml
对比新文件
@@ -0,0 +1,75 @@
server:
  port: 8090
spring:
  datasource:
    #主数据源
    primary:
      url: jdbc:mysql://127.0.0.1:3306/igds_api_5012?useUnicode=true&useSSL=false&characterEncoding=utf-8
      username: root
      password: Abc123..
      driver-class-name: com.mysql.jdbc.Driver
    #次数据源
    secondary:
      url: jdbc:mysql://127.0.0.1:3306/igds_5012?useUnicode=true&useSSL=false&characterEncoding=utf-8
      username: root
      password: Abc123..
      driver-class-name: com.mysql.jdbc.Driver
  jpa:
    #主jpa配置
    primary:
      show-sql: true
      properties:
        hibernate:
          hbm2ddl:
            auto: update
          dialect: org.hibernate.dialect.MySQL5InnoDBDialect
    #次jpa配置
    secondary:
      show-sql: true
      properties:
        hibernate:
          hbm2ddl:
            auto: none
          dialect: org.hibernate.dialect.MySQL5InnoDBDialect
  # Redis相关配置
  redis:
    database: 5
    host: 127.0.0.1
    port: 6379
    password: Redispwd..
    # 连接池最大连接数(使用负值表示没有限制)
    pool:
      max-active: 200
      max-wait: -1
      max-idle: 10
      min-idle: 0
      timeout: 6000
  kafka:
    bootstrap-servers: 103.203.217.16:9092
    properties:
      security.protocol: sasl_plaintext
      sasl.mechanism: PLAIN
      sasl.username: sc001
      sasl.password: wCV0ISwmoKwbx1lpBKMW
      sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username='sc001' password='wCV0ISwmoKwbx1lpBKMW';
    consumer:
      group-id: fzzygroup
      enable-auto-commit: true
      auto-commit-interval: 100
      auto-offset-reset: latest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
mqtt:
  host: tcp://10.13.4.84:11883
  client-id:
  client-username:
  client-password:
  client-timeout: 10
  client-alive-time: 20
  client-max-connect-times: 5
  client-topics:
  client-qos: 0
  isOpen: false
src/main/resources/application-gateway.yml
文件已删除
src/main/resources/application-proGateway.yml
对比新文件
@@ -0,0 +1,81 @@
server:
  port: 8090
spring:
  datasource:
    #主数据源
    primary:
      url: jdbc:mysql://127.0.0.1:3306/igds_api_5012?useUnicode=true&useSSL=false&characterEncoding=utf-8
      username: root
      password: Abc123..
      driver-class-name: com.mysql.jdbc.Driver
    #次数据源
    secondary:
      url: jdbc:mysql://127.0.0.1:3306/igds_5012?useUnicode=true&useSSL=false&characterEncoding=utf-8
      username: root
      password: Abc123..
      driver-class-name: com.mysql.jdbc.Driver
  jpa:
    #主jpa配置
    primary:
      show-sql: true
      properties:
        hibernate:
          hbm2ddl:
            auto: update
          dialect: org.hibernate.dialect.MySQL5InnoDBDialect
    #次jpa配置
    secondary:
      show-sql: true
      properties:
        hibernate:
          hbm2ddl:
            auto: none
          dialect: org.hibernate.dialect.MySQL5InnoDBDialect
  # Redis相关配置
  redis:
    database: 5
    host: 127.0.0.1
    port: 6379
    password: Redispwd..
    # 连接池最大连接数(使用负值表示没有限制)
    pool:
      max-active: 200
      max-wait: -1
      max-idle: 10
      min-idle: 0
      timeout: 6000
  kafka:
    bootstrap-servers: 103.203.217.16:9092
    security-protocol: SASL_PLAINTEXT
    sasl-mechanism: PLAIN
    sasl-jaas-config: org.apache.kafka.common.security.scram.ScramLoginModule required username=\"{username}\" password=\"{password}\";"
    sasl-username: sc001
    sasl-password: wCV0ISwmoKwbx1lpBKMW
    producer:
      retries: 0
      acks: 1
      batch-size: 16384
      buffer-memory: 33554432
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      group-id: fzzygroup
      enable-auto-commit: true
      auto-commit-interval: 100
      auto-offset-reset: latest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
mqtt:
  host: tcp://10.13.4.84:11883
  client-id:
  client-username:
  client-password:
  client-timeout: 10
  client-alive-time: 20
  client-max-connect-times: 5
  client-topics:
  client-qos: 0
  isOpen: false
src/main/resources/application.yml
@@ -1,7 +1,7 @@
##########################  Server   ##########################
spring:
  profiles:
    active: dev
    active: devGateway
  application:
    name: igds-api
  main:
src/main/resources/logback-spring.xml
@@ -33,6 +33,24 @@
        </root>
    </springProfile>
    <!-- dev环境 -->
    <springProfile name="devGateway">
        <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
            <encoder>
                <pattern>${PATTERN}</pattern>
            </encoder>
        </appender>
        <logger name="com.fzzy" level="DEBUG"/>
        <logger name="org.hibernate.tool" level="WARN"/>
        <logger name="com.bstek.dorado" level="WARN"/>
        <logger name="org.springframework.beans" level="WARN"/>
        <root level="info">
            <appender-ref ref="CONSOLE"/>
        </root>
    </springProfile>
    <!-- 生产环境 -->
    <springProfile name="pro">
        <!-- æ¯å¤©äº§ç”Ÿä¸€ä¸ªæ–‡ä»¶ -->
@@ -69,6 +87,40 @@
    <!-- 生产环境 -->
    <springProfile name="proGateway">
        <!-- æ¯å¤©äº§ç”Ÿä¸€ä¸ªæ–‡ä»¶ -->
        <appender name="PRO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
            <!-- 文件路径 -->
            <file>${PRO_LOG_HOME}/${APP_MODEL}-info.log</file>
            <!-- 日志记录器的滚动策略,按日期,按大小记录 -->
            <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
                <!-- 文件名称 -->
                <fileNamePattern>${PRO_LOG_HOME}/${APP_MODEL}-info-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
                <!--日志文件保留天数 -->
                <maxHistory>60</maxHistory>
                <!-- 日志大小 -->
                <timeBasedFileNamingAndTriggeringPolicy
                        class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                    <maxFileSize>100MB</maxFileSize>
                </timeBasedFileNamingAndTriggeringPolicy>
            </rollingPolicy>
            <encoder>
                <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
                <charset>UTF-8</charset>
            </encoder>
            <layout class="ch.qos.logback.classic.PatternLayout">
                <pattern>${PATTERN}</pattern>
            </layout>
        </appender>
        <logger name="org.hibernate.tool" level="WARN"/>
        <logger name="com.bstek.dorado" level="WARN"/>
        <root level="info">
            <appender-ref ref="PRO_FILE"/>
        </root>
    </springProfile>
    <!-- 生产环境 -->
    <springProfile name="linux">
        <!-- æ¯å¤©äº§ç”Ÿä¸€ä¸ªæ–‡ä»¶ -->
        <appender name="PRO_LINUX_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">