    sasl-jaas-config: "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"{username}\" password=\"{password}\";"
    sasl-username: sc001
    sasl-password: wCV0ISwmoKwbx1lpBKMW
    producer: # producer settings
      retries: 0 # number of send retries
      acks: 1 # acknowledgement level: how many partition replicas must confirm the write before the broker acks the producer (0, 1, or all/-1)
      batch-size: 16384 # batch size in bytes
      buffer-memory: 33554432 # producer-side buffer size in bytes
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer: # consumer settings
      group-id: fzzygroup # default consumer group ID
      enable-auto-commit: true # whether to commit offsets automatically
      auto-commit-interval: 100 # auto-commit delay (how long after a record is received before its offset is committed)
      # auto-offset-reset options:
      # earliest: if a partition has a committed offset, consume from that offset; otherwise consume from the beginning of the partition
      # latest: if a partition has a committed offset, consume from that offset; otherwise consume only records produced after the consumer starts
      # none: consume from the committed offsets only if every partition has one; if any partition lacks a committed offset, throw an exception
      auto-offset-reset: latest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
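
For reference, a minimal sketch (assuming Spring Boot with spring-kafka on the classpath) of how this configuration might be exercised: a `KafkaTemplate` send on the producer side and a `@KafkaListener` on the consumer side. The topic name `demo-topic` and the class and method names are placeholders, not part of the configuration above; how the `sasl-username`/`sasl-password` values are substituted into the `{username}`/`{password}` placeholders of `sasl-jaas-config` is application-specific and not shown here.

```java
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Component
public class KafkaDemo {

    private final KafkaTemplate<String, String> kafkaTemplate;

    public KafkaDemo(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    // Producer side: values are plain strings, matching the StringSerializer settings above.
    public void send(String message) {
        kafkaTemplate.send("demo-topic", message); // "demo-topic" is a placeholder topic name
    }

    // Consumer side: groupId matches the configured group-id; offsets are
    // auto-committed because enable-auto-commit is true.
    @KafkaListener(topics = "demo-topic", groupId = "fzzygroup")
    public void listen(ConsumerRecord<String, String> msg) {
        System.out.printf("received key=%s value=%s%n", msg.key(), msg.value());
    }
}
```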