  • Getting started with Flink 02: Kafka WordCount

    1. Start the ZooKeeper service

    ./bin/zookeeper-server-start.sh config/zookeeper.properties

    2. Start the Kafka service

    .\bin\windows\kafka-server-start.bat .\config\server.properties
    ./bin/kafka-server-start.sh config/server.properties

    3. Create a topic

    .\bin\windows\kafka-topics.bat --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test_flink
    ./bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test_flink
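    Optionally, verify that the topic was created by listing topics against the same ZooKeeper:

    ./bin/kafka-topics.sh --list --zookeeper localhost:2181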

    4. Create a producer

    .\bin\windows\kafka-console-producer.bat --broker-list localhost:9092 --topic test_flink
    ./bin/kafka-console-producer.sh --broker-list localhost:9092 --topic test_flink
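    Once the producer console is running, type a few whitespace-separated lines at its prompt; each line becomes one Kafka record that the Flink job below splits into words. For example:

    > hello flink
    > hello kafka flink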

    --5. Create a consumer
    --.\bin\windows\kafka-console-consumer.bat --zookeeper localhost:2181 --topic test_flink
    --./bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic test_flink --from-beginning
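    On newer Kafka releases the console consumer connects through a broker rather than ZooKeeper; a rough equivalent, assuming a broker on localhost:9092, would be:

    ./bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test_flink --from-beginning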

    5. Flink Kafka word count job (Scala)

    package flink
    
    import java.util.Properties
    
    import org.apache.flink.api.common.serialization.SimpleStringSchema
    import org.apache.flink.core.fs.FileSystem.WriteMode
    import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
    import org.apache.flink.streaming.api.windowing.time.Time
    import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011
    import org.apache.flink.streaming.api.scala._
    
    object KafkaWordCount {
    
    
      def getFlinkKafkaConsumer() = {
        val prop = new Properties()
    
        prop.setProperty("zookeeper.connect", "localhost:2181") //ZOOKEEPER_HOST
        prop.setProperty("bootstrap.servers", "localhost:9092") //KAFKA_BROKER
        prop.setProperty("group.id", "group1") //TRANSACTION_GROUP
        new FlinkKafkaConsumer011[String]("test_flink", new SimpleStringSchema(), prop) //TOPIC
      }
    
    
      def main(args: Array[String]): Unit = {
    
        val env = StreamExecutionEnvironment.getExecutionEnvironment
    
        val source = getFlinkKafkaConsumer()
    
        source.setStartFromEarliest()
    
        val dStream = env.addSource(source)
    
        // Split each record on whitespace, then count words in 2-second tumbling windows
        val result = dStream.flatMap(x => x.split("\\s+"))
          .map(x => (x, 1)).keyBy(0).timeWindow(Time.seconds(2L)).sum(1)
    
        result.setParallelism(1).print()
    
        result.writeAsText("E:\\sparkproject\\src\\test\\data\\result2.txt", WriteMode.OVERWRITE)
    
        env.execute("KafkaWordCount")
      }
    
    }
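
    For reference, a minimal sbt dependency sketch that this job is assumed to build against; the Flink version (1.7.2) and Scala version (2.11) are assumptions, so adjust them to your environment:

    // build.sbt (versions are assumptions, not from the original post)
    scalaVersion := "2.11.12"
    libraryDependencies ++= Seq(
      "org.apache.flink" %% "flink-streaming-scala"      % "1.7.2",
      "org.apache.flink" %% "flink-connector-kafka-0.11" % "1.7.2"
    )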
  • Original post: https://www.cnblogs.com/yin-fei/p/11165559.html