pom.xml
@@ -149,7 +149,7 @@ <dependency> <groupId>org.springframework.kafka</groupId> <artifactId>spring-kafka</artifactId> <version>2.2.1.RELEASE</version> <version>2.2.0.RELEASE</version> </dependency> <dependency> src/main/java/com/fzzy/conf/KafkaConfig.java
New file @@ -0,0 +1,82 @@ package com.fzzy.conf; import lombok.AllArgsConstructor; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.config.SaslConfigs; import org.apache.kafka.common.serialization.IntegerSerializer; import org.apache.kafka.common.serialization.StringSerializer; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.kafka.annotation.EnableKafka; import org.springframework.kafka.core.DefaultKafkaProducerFactory; import org.springframework.kafka.core.KafkaTemplate; import org.springframework.kafka.core.ProducerFactory; import java.util.HashMap; import java.util.Map; @Configuration @EnableKafka public class KafkaConfig { private String defaultSaslJaasConfig="org.apache.kafka.common.security.scram.ScramLoginModule required username=\"{username}\" password=\"{password}\";"; @Value("${spring.kafka.bootstrap-servers}") private String bootstrapServers; @Value("${spring.kafka.properties.security.protocol}") private String securityProtocol; @Value("${spring.kafka.properties.sasl.mechanism}") private String saslMechanism; @Value("${spring.kafka.properties.sasl.jaas.config}") private String saslJaasConfig; @Value("${spring.kafka.properties.sasl.username}") private String saslUsername; @Value("${spring.kafka.properties.sasl.password}") private String saslPassword; @Bean public ProducerFactory<String, String> producerFactory() { DefaultKafkaProducerFactory<String, String> producerFactory = new DefaultKafkaProducerFactory<>(producerConfigs()); return producerFactory; } @Bean public Map<String, Object> producerConfigs() { Map<String, Object> props = new HashMap<>(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); props.put(ProducerConfig.RETRIES_CONFIG, 0); props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 5000); props.put(ProducerConfig.LINGER_MS_CONFIG, 10); props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 10000); props.put("security.protocol", securityProtocol); props.put("sasl.mechanism", saslMechanism); props.put("sasl.username", saslUsername); props.put("sasl.password", saslPassword); // props.put("sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required username='sc001' password='wCV0ISwmoKwbx1lpBKMW';"); props.put("sasl.jaas.config",saslJaasConfig); return props; } @Bean public KafkaTemplate<String, String> kafkaTemplate(ProducerFactory<String, String> producerFactory) { return new KafkaTemplate<>(producerFactory); } }
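A minimal sketch of how the kafkaTemplate bean defined above could be used to publish an already-serialized payload. The class name GrainDataSender and the topic name "grain-data" are illustrative assumptions, not part of this change set; in the diff itself the publishing role belongs to KafkaDeviceReport.sendGrainData2Cloud.

package com.fzzy.conf;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

// Hypothetical helper; the topic name "grain-data" is an assumption.
@Component
public class GrainDataSender {

    private final KafkaTemplate<String, String> kafkaTemplate;

    public GrainDataSender(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    // Sends a JSON string (for example the output of JSONObject.toJSONString) to the assumed topic.
    public void send(String jsonPayload) {
        kafkaTemplate.send("grain-data", jsonPayload);
    }
}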
src/main/java/com/fzzy/conf/MyPartitionTemplate.java
File deleted
src/main/java/com/fzzy/gateway/api/DeviceReportService.java
@@ -1,7 +1,7 @@ package com.fzzy.gateway.api; import com.fzzy.gateway.entity.GatewayDevice; import com.fzzy.gateway.hx2023.data.GrainData; import com.fzzy.gateway.hx2023.data.KafaGrainData; public interface DeviceReportService { @@ -28,6 +28,6 @@ * @param data * @return */ String pushGrainData2Cloud(GrainData data); String pushGrainData2Cloud(KafaGrainData data); } src/main/java/com/fzzy/gateway/api/GatewaySyncGranService.java
@@ -1,7 +1,7 @@ package com.fzzy.gateway.api; import com.fzzy.gateway.hx2023.data.GrainData; import com.fzzy.gateway.hx2023.data.KafaGrainData; import com.fzzy.gateway.hx2023.data.*; /** @@ -22,7 +22,7 @@ * @param reqData * @return */ public GrainData syncGrain(SyncReqData reqData); public KafaGrainData syncGrain(SyncReqData reqData); } src/main/java/com/fzzy/gateway/hx2023/data/GrainDataDetail.java
File deleted
src/main/java/com/fzzy/gateway/hx2023/data/KafaGrainData.java
File renamed from src/main/java/com/fzzy/gateway/hx2023/data/GrainData.java @@ -1,14 +1,14 @@ package com.fzzy.gateway.hx2023.data; import com.alibaba.fastjson2.JSONObject; import lombok.Data; import java.util.List; /** * Grain condition information */ @Data public class GrainData { public class KafaGrainData { // Message ID private String messageId; @@ -28,22 +28,22 @@ // Detection time, format: yyyy-MM-dd HH:mm:ss private String collectTime; private String minX = "0"; private int minX = 0; private String minY = "0"; private int minY = 0; private String minZ = "0"; private int minZ = 0; private String maxX = "0"; private int maxX = 0; private String maxY = "0"; private int maxY = 0; private String maxZ = "0"; private int maxZ = 0; // Module identifier - constant, always pass apiTemperature private String apISource = "apiTemperature"; // Grain temperature details private List<GrainDataDetail> temperature; private JSONObject params; } src/main/java/com/fzzy/gateway/hx2023/data/KafkaGrainDataDetail.java
New file @@ -0,0 +1,58 @@ package com.fzzy.gateway.hx2023.data; import lombok.Data; /** * Grain condition detail */ @Data public class KafkaGrainDataDetail { // Cable number private String cableNum; // Layer number private String layerNumber; // Temperature value private String temperature; // Index, starting from 0 private String position; // Column of this temperature point; required for flat and underground warehouses private int linex; // Row of this temperature point; required for flat and underground warehouses private int rowy; // Required for squat silos and silos; example: {\"totalCircle\":3,\"smallCircle\":\"4,10,16\"}; totalCircle: total number of circles, smallCircle: number of cables per circle private int total_circle; // Specific circle number - required for squat silos and silos private int circle; public KafkaGrainDataDetail() { } public KafkaGrainDataDetail(String cableNum, String layerNumber, String temperature, String position, int linex, int rowy, int total_circle, int circle) { this.cableNum = cableNum; this.layerNumber = layerNumber; this.temperature = temperature; this.position = position; this.linex = linex; this.rowy = rowy; this.total_circle = total_circle; this.circle = circle; } public KafkaGrainDataDetail(String cableNum, String layerNumber, String temperature, String position, int linex, int rowy) { this.cableNum = cableNum; this.layerNumber = layerNumber; this.temperature = temperature; this.position = position; this.linex = linex; this.rowy = rowy; } }
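A brief, hypothetical illustration of the two constructors above: the six-argument form carries the row/column coordinates used for flat and underground warehouses, while the eight-argument form adds the circle information required for squat silos and silos. All values below are invented for illustration and do not come from the change set.

package com.fzzy.gateway.hx2023.data;

// Illustrative only; none of these values are taken from the diff.
public class KafkaGrainDataDetailDemo {
    public static void main(String[] args) {
        // Flat or underground warehouse: linex/rowy coordinates are filled in.
        KafkaGrainDataDetail flatWarehousePoint =
                new KafkaGrainDataDetail("1", "2", "18.5", "0", 3, 4);
        // Squat silo / silo: total_circle and circle are filled in as well.
        KafkaGrainDataDetail siloPoint =
                new KafkaGrainDataDetail("5", "1", "17.2", "12", 0, 0, 3, 2);
        System.out.println(flatWarehousePoint);
        System.out.println(siloPoint);
    }
}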
src/main/java/com/fzzy/gateway/hx2023/data/KafkaGrainTH.java
New file @@ -0,0 +1,24 @@ package com.fzzy.gateway.hx2023.data; import lombok.Data; import java.util.List; /** * Grain condition temperature and humidity information */ @Data public class KafkaGrainTH { // Warehouse humidity private String humidity; // Warehouse temperature private String temperature; // Air temperature private String airTemperature; // Air humidity private String airHumidity; } src/main/java/com/fzzy/gateway/hx2023/data/TRHInfo.java
New file @@ -0,0 +1,15 @@ package com.fzzy.gateway.hx2023.data; import lombok.Data; import java.util.List; @Data public class TRHInfo { private List<KafkaGrainDataDetail> temperature; private List<KafkaGrainTH> temperatureAndhumidity; } src/main/java/com/fzzy/gateway/hx2023/kafka/KafkaDeviceReport.java
@@ -1,7 +1,7 @@ package com.fzzy.gateway.hx2023.kafka; import com.alibaba.fastjson2.JSONObject; import com.fzzy.gateway.hx2023.data.GrainData; import com.fzzy.gateway.hx2023.data.KafaGrainData; import lombok.extern.slf4j.Slf4j; @@ -30,7 +30,7 @@ * @param data * @return */ public String sendGrainData2Cloud(GrainData data) { public String sendGrainData2Cloud(KafaGrainData data) { String strData = JSONObject.toJSONString(data); src/main/java/com/fzzy/gateway/hx2023/service/DeviceReportServiceImpl.java
@@ -6,7 +6,7 @@ import com.fzzy.gateway.api.DeviceReportService; import com.fzzy.gateway.entity.GatewayDevice; import com.fzzy.gateway.hx2023.ScConstant; import com.fzzy.gateway.hx2023.data.GrainData; import com.fzzy.gateway.hx2023.data.KafaGrainData; import com.fzzy.gateway.hx2023.data.WebSocketPacket; import com.fzzy.gateway.hx2023.data.WebSocketPacketHeader; import com.fzzy.gateway.hx2023.data.WeightInfo; @@ -77,7 +77,7 @@ } @Override public String pushGrainData2Cloud(GrainData data) { public String pushGrainData2Cloud(KafaGrainData data) { return kafkaDeviceReport.sendGrainData2Cloud(data); } src/main/java/com/fzzy/gateway/hx2023/service/HxGatewaySyncGrainImpl.java
@@ -1,7 +1,7 @@ package com.fzzy.gateway.hx2023.service; import com.fzzy.gateway.api.GatewaySyncGranService; import com.fzzy.gateway.hx2023.data.GrainData; import com.fzzy.gateway.hx2023.data.KafaGrainData; import com.fzzy.gateway.hx2023.data.SyncReqData; import lombok.Data; import lombok.extern.slf4j.Slf4j; @@ -22,7 +22,7 @@ } @Override public GrainData syncGrain(SyncReqData reqData) { public KafaGrainData syncGrain(SyncReqData reqData) { return null; } } src/main/java/com/fzzy/gateway/service/GatewayDeviceService.java
@@ -1,5 +1,6 @@ package com.fzzy.gateway.service; import com.alibaba.fastjson2.JSONObject; import com.bstek.dorado.annotation.DataProvider; import com.bstek.dorado.annotation.DataResolver; import com.bstek.dorado.annotation.Expose;
@@ -10,11 +11,12 @@ import com.fzzy.gateway.GatewayUtils; import com.fzzy.gateway.api.DeviceReportService; import com.fzzy.gateway.api.GatewayRemoteManager; import com.fzzy.gateway.data.QueryParam; import com.fzzy.gateway.entity.GatewayDevice; import com.fzzy.gateway.hx2023.ScConstant; import com.fzzy.gateway.hx2023.data.GrainData; import com.fzzy.gateway.hx2023.data.GrainDataDetail; import com.fzzy.gateway.hx2023.data.KafaGrainData; import com.fzzy.gateway.hx2023.data.KafkaGrainDataDetail; import com.fzzy.gateway.hx2023.data.KafkaGrainTH; import com.fzzy.gateway.hx2023.data.TRHInfo; import com.fzzy.gateway.service.repository.GatewayDeviceRep; import lombok.extern.slf4j.Slf4j;
@@ -150,7 +152,7 @@ Fz40Grain lastData; GrainData pushData; KafaGrainData pushData; DeviceReportService deviceReportService = null; for (GatewayDevice device : list) {
@@ -193,8 +195,8 @@ * @param lastData * @return */ private GrainData lastData2PushData(Fz40Grain lastData, GatewayDevice device) { GrainData result = new GrainData(); private KafaGrainData lastData2PushData(Fz40Grain lastData, GatewayDevice device) { KafaGrainData result = new KafaGrainData(); result.setMessageId(ScConstant.getMessageId()); result.setDeviceID(device.getDeviceId());
@@ -206,63 +208,100 @@ // Layers-rows-columns String[] attrCable = lastData.getCable().split("-"); // Layer configuration int layMax = Integer.valueOf(attrCable[0]); // Configuration for silos String[] cableCirAtt = new String[0]; if (StringUtils.isNotEmpty(lastData.getCableCir())) { cableCirAtt = lastData.getCableCir().split("-"); return lastData2PushData2(lastData, device); } // Layers, rows, columns int cableZ = Integer.valueOf(attrCable[0]); int cableY = Integer.valueOf(attrCable[1]); int cableX = Integer.valueOf(attrCable[2]); // Temperature array String[] attr = lastData.getPoints().split(","); // Cable number int cableNum = 1, layerNumber = 1, position = 0; int cableNum = 1, position = 0; int curCir = 1;// current circle int cirLay = 1;// layer within the current circle String curTemp; List<GrainDataDetail> details = new ArrayList<>(); List<KafkaGrainDataDetail> temperature = new ArrayList<>(); int x = 0, y = 0, z = 0; for (int i = 0; i < attr.length; i++) { position = i; curTemp = attr[i]; z = i % cableZ + 1; x = i / (cableZ * cableY); y = x * (cableZ * cableY); y = (i - y) / cableZ; // Flip the X axis x = cableX - 1 - x; // Cable number cableNum = (i / layMax) + 1; layerNumber = (i % layMax) + 1; cableNum = (i / cableZ) + 1; if (cableCirAtt.length > 0) { curCir = getCurCir(cableNum, attrCable); cirLay = Integer.valueOf(cableCirAtt[curCir - 1]); curTemp = attr[i]; details.add(new GrainDataDetail(cableNum, cirLay, position, curTemp)); } else { // Determine maximum TODO to be optimized // Determine maximum if (curTemp.equals(result.getMaxTemperature())) { result.setMaxX(cableNum + ""); result.setMaxZ(curTemp); result.setMaxX(x); result.setMaxY(y); result.setMaxZ(position); } // Determine minimum TODO to be optimized // Determine minimum if (curTemp.equals(result.getMinTemperature())) { result.setMinX(cableNum + ""); result.setMinZ(curTemp); result.setMinX(x); result.setMinY(y); result.setMinZ(position); } details.add(new GrainDataDetail(cableNum, layerNumber, position, curTemp)); temperature.add(new KafkaGrainDataDetail(cableNum + "", z + "", curTemp, position + "", x, y)); } } // Grain temperature information TRHInfo trhInfo = new TRHInfo(); trhInfo.setTemperature(temperature); result.setTemperature(details); // Warehouse temperature and humidity information KafkaGrainTH grainTH = new KafkaGrainTH(); grainTH.setHumidity(lastData.getHumidityIn() + ""); grainTH.setTemperature(lastData.getTempIn() + ""); grainTH.setAirHumidity(lastData.getHumidityOut() + ""); grainTH.setAirTemperature(lastData.getTempOut() + ""); List<KafkaGrainTH> temperatureAndhumidity = new ArrayList<>(); temperatureAndhumidity.add(grainTH); trhInfo.setTemperatureAndhumidity(temperatureAndhumidity); JSONObject params = new JSONObject(); params.put("TRHInfo", trhInfo); result.setParams(params); return result; } /** * For silos TODO ----- * * @param lastData * @param device * @return */ private KafaGrainData lastData2PushData2(Fz40Grain lastData, GatewayDevice device) { return null; } private int getCurCir(int curRoot, String[] cableRuleAtt) { int sum = 0;
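For reference, a standalone sketch of the index-to-coordinate mapping used in lastData2PushData above. It assumes getCable() yields a "layers-rows-columns" string and that getPoints() lists temperatures layer-first within each cable; the sample layout "4-3-5" is invented for illustration.

package com.fzzy.gateway.service;

// Illustrative only: replays the same arithmetic as lastData2PushData for an assumed 4-3-5 layout.
public class CoordinateMappingDemo {
    public static void main(String[] args) {
        int cableZ = 4, cableY = 3, cableX = 5;            // layers, rows, columns ("4-3-5")
        for (int i = 0; i < cableZ * cableY * cableX; i++) {
            int z = i % cableZ + 1;                        // layer, 1-based
            int x = i / (cableZ * cableY);                 // column index before flipping
            int y = (i - x * (cableZ * cableY)) / cableZ;  // row index within that column plane
            x = cableX - 1 - x;                            // flip the X axis, as in the diff
            int cableNum = (i / cableZ) + 1;               // sequential cable number
            System.out.printf("i=%d -> cable=%d x=%d y=%d z=%d%n", i, cableNum, x, y, z);
        }
    }
}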
src/main/resources/application-dev.yml
@@ -82,21 +82,17 @@ sasl-jaas-config: org.apache.kafka.common.security.scram.ScramLoginModule required username=\"{username}\" password=\"{password}\";" sasl-username: sc001 sasl-password: wCV0ISwmoKwbx1lpBKMW producer: # producer retries: 0 # number of retries acks: 1 # ack level: how many partition replicas must be written before the producer receives an ack (options: 0, 1, all/-1) batch-size: 16384 # batch size buffer-memory: 33554432 # producer buffer size producer: retries: 0 acks: 1 batch-size: 16384 buffer-memory: 33554432 key-serializer: org.apache.kafka.common.serialization.StringSerializer value-serializer: org.apache.kafka.common.serialization.StringSerializer consumer: # consumer group-id: fzzygroup # default consumer group ID enable-auto-commit: true # whether to auto-commit offsets auto-commit-interval: 100 # offset commit delay (how long after a message is received before the offset is committed) # earliest: if the partition has a committed offset, consume from it; otherwise consume from the beginning # latest: if the partition has a committed offset, consume from it; otherwise consume only data newly produced to that partition # none: if every partition of the topic has a committed offset, consume from after it; if any partition has no committed offset, throw an exception consumer: group-id: fzzygroup enable-auto-commit: true auto-commit-interval: 100 auto-offset-reset: latest key-deserializer: org.apache.kafka.common.serialization.StringDeserializer value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
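The change set only adds producer-side code, but the consumer block above (group fzzygroup, String deserializers, auto-offset-reset latest) would be picked up by Spring Boot's auto-configured listener container factory. A hypothetical listener against these settings might look like the sketch below; the class and the topic name "grain-data" are assumptions, not part of the change.

package com.fzzy.conf;

import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

// Hypothetical consumer sketch; the topic name is an assumption.
@Slf4j
@Component
public class GrainDataListener {

    // group-id and deserializers come from the spring.kafka.consumer block in application-dev.yml.
    @KafkaListener(topics = "grain-data", groupId = "fzzygroup")
    public void onMessage(String payload) {
        log.info("Received grain data payload: {}", payload);
    }
}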
src/main/resources/application-devGateway.yml
New file @@ -0,0 +1,75 @@ server: port: 8090 spring: datasource: # primary data source primary: url: jdbc:mysql://127.0.0.1:3306/igds_api_5012?useUnicode=true&useSSL=false&characterEncoding=utf-8 username: root password: Abc123.. driver-class-name: com.mysql.jdbc.Driver # secondary data source secondary: url: jdbc:mysql://127.0.0.1:3306/igds_5012?useUnicode=true&useSSL=false&characterEncoding=utf-8 username: root password: Abc123.. driver-class-name: com.mysql.jdbc.Driver jpa: # primary JPA configuration primary: show-sql: true properties: hibernate: hbm2ddl: auto: update dialect: org.hibernate.dialect.MySQL5InnoDBDialect # secondary JPA configuration secondary: show-sql: true properties: hibernate: hbm2ddl: auto: none dialect: org.hibernate.dialect.MySQL5InnoDBDialect # Redis configuration redis: database: 5 host: 127.0.0.1 port: 6379 password: Redispwd.. # maximum connections in the pool (a negative value means no limit) pool: max-active: 200 max-wait: -1 max-idle: 10 min-idle: 0 timeout: 6000 kafka: bootstrap-servers: 103.203.217.16:9092 properties: security.protocol: sasl_plaintext sasl.mechanism: PLAIN sasl.username: sc001 sasl.password: wCV0ISwmoKwbx1lpBKMW sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username='sc001' password='wCV0ISwmoKwbx1lpBKMW'; consumer: group-id: fzzygroup enable-auto-commit: true auto-commit-interval: 100 auto-offset-reset: latest key-deserializer: org.apache.kafka.common.serialization.StringDeserializer value-deserializer: org.apache.kafka.common.serialization.StringDeserializer mqtt: host: tcp://10.13.4.84:11883 client-id: client-username: client-password: client-timeout: 10 client-alive-time: 20 client-max-connect-times: 5 client-topics: client-qos: 0 isOpen: false src/main/resources/application-gateway.yml
File deleted
src/main/resources/application-proGateway.yml
New file @@ -0,0 +1,81 @@ server: port: 8090 spring: datasource: # primary data source primary: url: jdbc:mysql://127.0.0.1:3306/igds_api_5012?useUnicode=true&useSSL=false&characterEncoding=utf-8 username: root password: Abc123.. driver-class-name: com.mysql.jdbc.Driver # secondary data source secondary: url: jdbc:mysql://127.0.0.1:3306/igds_5012?useUnicode=true&useSSL=false&characterEncoding=utf-8 username: root password: Abc123.. driver-class-name: com.mysql.jdbc.Driver jpa: # primary JPA configuration primary: show-sql: true properties: hibernate: hbm2ddl: auto: update dialect: org.hibernate.dialect.MySQL5InnoDBDialect # secondary JPA configuration secondary: show-sql: true properties: hibernate: hbm2ddl: auto: none dialect: org.hibernate.dialect.MySQL5InnoDBDialect # Redis configuration redis: database: 5 host: 127.0.0.1 port: 6379 password: Redispwd.. # maximum connections in the pool (a negative value means no limit) pool: max-active: 200 max-wait: -1 max-idle: 10 min-idle: 0 timeout: 6000 kafka: bootstrap-servers: 103.203.217.16:9092 security-protocol: SASL_PLAINTEXT sasl-mechanism: PLAIN sasl-jaas-config: org.apache.kafka.common.security.scram.ScramLoginModule required username=\"{username}\" password=\"{password}\";" sasl-username: sc001 sasl-password: wCV0ISwmoKwbx1lpBKMW producer: retries: 0 acks: 1 batch-size: 16384 buffer-memory: 33554432 key-serializer: org.apache.kafka.common.serialization.StringSerializer value-serializer: org.apache.kafka.common.serialization.StringSerializer consumer: group-id: fzzygroup enable-auto-commit: true auto-commit-interval: 100 auto-offset-reset: latest key-deserializer: org.apache.kafka.common.serialization.StringDeserializer value-deserializer: org.apache.kafka.common.serialization.StringDeserializer mqtt: host: tcp://10.13.4.84:11883 client-id: client-username: client-password: client-timeout: 10 client-alive-time: 20 client-max-connect-times: 5 client-topics: client-qos: 0 isOpen: false src/main/resources/application.yml
@@ -1,7 +1,7 @@ ########################## Server ########################## spring: profiles: active: dev active: devGateway application: name: igds-api main: src/main/resources/logback-spring.xml
@@ -33,6 +33,24 @@ </root> </springProfile> <!-- dev environment --> <springProfile name="devGateway"> <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender"> <encoder> <pattern>${PATTERN}</pattern> </encoder> </appender> <logger name="com.fzzy" level="DEBUG"/> <logger name="com.fzzy" level="DEBUG"/> <logger name="org.hibernate.tool" level="WARN"/> <logger name="com.bstek.dorado" level="WARN"/> <logger name="org.springframework.beans" level="WARN"/> <root level="info"> <appender-ref ref="CONSOLE"/> </root> </springProfile> <!-- production environment --> <springProfile name="pro"> <!-- one file per day --> @@ -69,6 +87,40 @@ <!-- production environment --> <springProfile name="proGateway"> <!-- one file per day --> <appender name="PRO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> <!-- file path --> <file>${PRO_LOG_HOME}/${APP_MODEL}-info.log</file> <!-- rolling policy: roll by date and by size --> <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> <!-- file name --> <fileNamePattern>${PRO_LOG_HOME}/${APP_MODEL}-info-%d{yyyy-MM-dd}.%i.log</fileNamePattern> <!-- number of days log files are kept --> <maxHistory>60</maxHistory> <!-- maximum log file size --> <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP"> <maxFileSize>100MB</maxFileSize> </timeBasedFileNamingAndTriggeringPolicy> </rollingPolicy> <encoder> <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern> <charset>UTF-8</charset> </encoder> <layout class="ch.qos.logback.classic.PatternLayout"> <pattern>${PATTERN}</pattern> </layout> </appender> <logger name="org.hibernate.tool" level="WARN"/> <logger name="com.bstek.dorado" level="WARN"/> <root level="info"> <appender-ref ref="PRO_FILE"/> </root> </springProfile> <!-- production environment --> <springProfile name="linux"> <!-- one file per day --> <appender name="PRO_LINUX_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">