Flume configuration file (avro-memory-kafka.conf)
a1.sources = r1
a1.sinks = k1
a1.channels = c1

# Avro source that receives events from the application
a1.sources.r1.type = avro
a1.sources.r1.bind = hadoop
a1.sources.r1.port = 44444

# Kafka sink that writes events to the broker at hadoop:9092
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
a1.sinks.k1.topic = end
a1.sinks.k1.brokerList = hadoop:9092
a1.sinks.k1.batchSize = 20
a1.sinks.k1.requiredAcks = 1

# Memory channel connecting the source to the sink
a1.channels.c1.type = memory
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
Start Flume
flume-ng agent --name a1 --conf $FLUME_HOME/conf --conf-file /opt/flume/flume/conf/avro-memory-kafka.conf -Dflume.root.logger=INFO,console
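Before wiring up the application, the avro source can be smoke-tested with Flume's built-in avro-client; the file path below is only a placeholder for any small text file:
flume-ng avro-client --conf $FLUME_HOME/conf --host hadoop --port 44444 --filename /tmp/test.log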
Start Kafka
kafka-server-start.sh /opt/kafka/kafka/config/server.properties
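Kafka requires a running ZooKeeper (the topic commands below talk to hadoop:2181). If one is not already up, it can be started with the script shipped with Kafka; the properties path here is assumed to mirror the server.properties path above:
zookeeper-server-start.sh /opt/kafka/kafka/config/zookeeper.properties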
Create a topic
kafka-topics.sh --create --zookeeper hadoop:2181 --replication-factor 1 --partitions 1 --topic demo
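Optionally, the partition and replication settings can be confirmed by describing the topic:
kafka-topics.sh --describe --zookeeper hadoop:2181 --topic demo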
List topics
kafka-topics.sh --list --zookeeper hadoop:2181
The pom.xml, log4j.properties, and IDEA application are the same as in the previous article.
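Once the application is sending events, the end-to-end flow can be verified with a console consumer. Note that the Kafka sink above writes to topic end while the topic created in this section is demo; the two names should be made consistent. Depending on the Kafka version, the consumer takes --bootstrap-server hadoop:9092 (newer releases) or --zookeeper hadoop:2181 (older releases):
kafka-console-consumer.sh --bootstrap-server hadoop:9092 --topic end --from-beginning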