# flume从kafka取数据,并发往另外一台kafka配置
# (Flume: consume from one Kafka cluster/topic and forward to another Kafka cluster/topic)

# Flume agent "agent": one Kafka source -> memory channel -> one Kafka sink.
# NOTE: Flume reads this file with java.util.Properties, which has NO inline
# comments — text after "#" on a value line becomes part of the value. All
# comments must therefore be on their own lines.
agent.sources = s1
agent.sinks = k1
agent.channels = c1

# Kafka source: consume topic "test1" from the upstream cluster.
agent.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
agent.sources.s1.kafka.bootstrap.servers = 192.168.133.137:9092
agent.sources.s1.kafka.topics = test1
agent.sources.s1.kafka.consumer.group.id = test1

# This static interceptor is required: KafkaSource stamps each event's
# "topic" header with the source topic, and KafkaSink prefers that header
# over its own configured topic. Overwriting the header (preserveExisting
# = false) with the destination topic makes the sink's topic take effect.
agent.sources.s1.interceptors = i1
agent.sources.s1.interceptors.i1.type = static
agent.sources.s1.interceptors.i1.key = topic
agent.sources.s1.interceptors.i1.preserveExisting = false
# Destination topic written into the "topic" header.
agent.sources.s1.interceptors.i1.value = test2

# In-memory channel buffering events between source and sink.
agent.channels.c1.type = memory
agent.channels.c1.capacity = 100000
agent.channels.c1.transactionCapacity = 5000

# Kafka sink: produce to topic "test2" on the downstream cluster.
agent.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
agent.sinks.k1.kafka.topic = test2
agent.sinks.k1.kafka.bootstrap.servers = 192.168.133.137:9092
agent.sinks.k1.flumeBatchSize = 200
# Kafka producer properties must carry the "kafka.producer." prefix;
# without it ("producer.acks") Flume silently ignores the setting.
# acks = 1: wait for the partition leader's acknowledgment only.
agent.sinks.k1.kafka.producer.acks = 1
# Optional producer tuning (disabled):
# agent.sinks.k1.kafka.producer.linger.ms = 1
# agent.sinks.k1.kafka.producer.compression.type = snappy

# Wire the source and sink to the channel.
agent.sources.s1.channels = c1
agent.sinks.k1.channel = c1

# 原文地址 (original source): https://www.cnblogs.com/hel7512/p/12350472.html