【Kafka】Stream API

Stream API


The official Kafka documentation gives the basic format:

http://kafka.apachecn.org/10/javadoc/index.html?org/apache/kafka/streams/KafkaStreams.html

Basic template
 Map<String, Object> props = new HashMap<>();
 props.put(StreamsConfig.APPLICATION_ID_CONFIG, "my-stream-processing-application");
 props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
 props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
 props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
 StreamsConfig config = new StreamsConfig(props);

 StreamsBuilder builder = new StreamsBuilder();
 builder.<String, String>stream("my-input-topic").mapValues(value -> String.valueOf(value.length())).to("my-output-topic");

 KafkaStreams streams = new KafkaStreams(builder.build(), config);
 streams.start();
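The snippet above starts the topology but never stops it. A minimal, hedged addition (not part of the documented template, just a common pattern) is a JVM shutdown hook that calls KafkaStreams.close() so the application releases its consumers and state stores cleanly:

 // Assumption: added for clean shutdown, not part of the official template.
 Runtime.getRuntime().addShutdownHook(new Thread(streams::close));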

Developing with the Streams API

Requirement: use the Kafka Streams API to read the data in the topic test, convert the values from lowercase to uppercase, and write them to the topic test2.

package cn.itcast.kafka.demo3;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStreamBuilder;

import java.util.Properties;

public class StreamAPI {
    public static void main(String[] args) {
        // Configure the Streams application: application id, bootstrap servers,
        // and default key/value serdes (plain strings here).
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-application");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "node01:9092");
        props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        // Build the processing topology with KStreamBuilder (the pre-1.0 API,
        // deprecated in Kafka 1.0 in favor of StreamsBuilder).
        KStreamBuilder builder = new KStreamBuilder();
        // Read from "test", upper-case each value with a lambda, write to "test2".
        builder.stream("test").mapValues(line -> line.toString().toUpperCase()).to("test2");
        KafkaStreams streams = new KafkaStreams(builder, props);
        streams.start();
    }
}
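The class above uses the pre-1.0 KStreamBuilder API. As a hedged sketch only (the class name StreamAPINewApi and the shutdown hook are my additions, not the original author's code), the same job written against the StreamsBuilder API from the 1.0 template might look like this, reusing the node01:9092 broker and the test/test2 topics from the example:

package cn.itcast.kafka.demo3;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;

import java.util.Properties;

public class StreamAPINewApi {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "uppercase-application");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "node01:9092");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        // StreamsBuilder replaces the deprecated KStreamBuilder in Kafka 1.0+.
        StreamsBuilder builder = new StreamsBuilder();
        builder.<String, String>stream("test")
               .mapValues(value -> value.toUpperCase())
               .to("test2");

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        streams.start();

        // Assumption: close the Streams instance cleanly when the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}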
Original article: https://www.cnblogs.com/zzzsw0412/p/12772443.html